diff --git a/premake5.lua b/premake5.lua index 54277ad27..94df2fde5 100644 --- a/premake5.lua +++ b/premake5.lua @@ -169,6 +169,7 @@ solution("xenia") include("third_party/libav.lua") include("third_party/snappy.lua") include("third_party/spirv-tools.lua") + include("third_party/vulkan/loader") include("third_party/xxhash.lua") include("third_party/yaml-cpp.lua") @@ -188,6 +189,7 @@ solution("xenia") include("src/xenia/ui") include("src/xenia/ui/gl") include("src/xenia/ui/spirv") + include("src/xenia/ui/vulkan") include("src/xenia/vfs") if os.is("windows") then diff --git a/src/xenia/gpu/spirv_shader_translator.cc b/src/xenia/gpu/spirv_shader_translator.cc index 7919a3fe3..b9af44c22 100644 --- a/src/xenia/gpu/spirv_shader_translator.cc +++ b/src/xenia/gpu/spirv_shader_translator.cc @@ -24,7 +24,7 @@ void SpirvShaderTranslator::StartTranslation() { auto fn = e.MakeMainEntry(); auto float_1_0 = e.MakeFloatConstant(1.0f); auto acos = e.CreateGlslStd450InstructionCall( - spv::Decoration::Invariant, e.MakeFloatType(32), spv::GLSLstd450::Acos, + spv::Decoration::Invariant, e.MakeFloatType(32), spv::GLSLstd450::kAcos, {float_1_0}); e.MakeReturn(true); } @@ -188,7 +188,7 @@ spv::Id SpirvShaderTranslator::LoadFromOperand(const InstructionOperand& op) { if (op.is_absolute_value) { current_value_id = e.CreateGlslStd450InstructionCall( spv::Decoration::RelaxedPrecision, current_type_id, - spv::GLSLstd450::FAbs, {current_value_id}); + spv::GLSLstd450::kFAbs, {current_value_id}); } if (op.is_negated) { current_value_id = diff --git a/src/xenia/ui/spirv/spirv_assembler.cc b/src/xenia/ui/spirv/spirv_assembler.cc index 823779e5f..4ade9204b 100644 --- a/src/xenia/ui/spirv/spirv_assembler.cc +++ b/src/xenia/ui/spirv/spirv_assembler.cc @@ -9,7 +9,7 @@ #include "xenia/ui/spirv/spirv_assembler.h" -#include "third_party/spirv-tools/include/libspirv/libspirv.h" +#include "third_party/spirv-tools/include/spirv-tools/libspirv.h" #include "xenia/base/logging.h" namespace xe { diff --git 
a/src/xenia/ui/spirv/spirv_disassembler.cc b/src/xenia/ui/spirv/spirv_disassembler.cc index b119ac734..d213582ae 100644 --- a/src/xenia/ui/spirv/spirv_disassembler.cc +++ b/src/xenia/ui/spirv/spirv_disassembler.cc @@ -9,7 +9,7 @@ #include "xenia/ui/spirv/spirv_disassembler.h" -#include "third_party/spirv-tools/include/libspirv/libspirv.h" +#include "third_party/spirv-tools/include/spirv-tools/libspirv.h" #include "xenia/base/logging.h" namespace xe { diff --git a/src/xenia/ui/spirv/spirv_util.h b/src/xenia/ui/spirv/spirv_util.h index 5a3bb52f8..ac7a9a05e 100644 --- a/src/xenia/ui/spirv/spirv_util.h +++ b/src/xenia/ui/spirv/spirv_util.h @@ -10,8 +10,8 @@ #ifndef XENIA_UI_SPIRV_SPIRV_UTIL_H_ #define XENIA_UI_SPIRV_SPIRV_UTIL_H_ -#include "third_party/spirv/GLSL.std.450.h" -#include "third_party/spirv/spirv.h" +#include "third_party/spirv/GLSL.std.450.hpp11" +#include "third_party/spirv/spirv.hpp11" // Forward declarations from SPIRV-Tools so we don't pollute /so/ much. struct spv_binary_t; diff --git a/src/xenia/ui/vulkan/premake5.lua b/src/xenia/ui/vulkan/premake5.lua new file mode 100644 index 000000000..2144ca30a --- /dev/null +++ b/src/xenia/ui/vulkan/premake5.lua @@ -0,0 +1,53 @@ +project_root = "../../../.." +include(project_root.."/tools/build") + +group("src") +project("xenia-ui-vulkan") + uuid("4933d81e-1c2c-4d5d-b104-3c0eb9dc2f00") + kind("StaticLib") + language("C++") + links({ + "xenia-base", + "xenia-ui", + "xenia-ui-spirv", + }) + defines({ + }) + includedirs({ + project_root.."/third_party/gflags/src", + project_root.."/third_party/vulkan/", + }) + local_platform_files() + removefiles({"*_demo.cc"}) + +group("demos") +project("xenia-ui-window-vulkan-demo") + uuid("97598f13-3177-454c-8e58-c59e2b6ede27") + kind("WindowedApp") + language("C++") + links({ + "gflags", + "imgui", + "vulkan-loader", + "xenia-base", + "xenia-ui", + "xenia-ui-spirv", + "xenia-ui-vulkan", + }) + flags({ + "WinMain", -- Use WinMain instead of main. 
+ }) + defines({ + }) + includedirs({ + project_root.."/third_party/gflags/src", + project_root.."/third_party/vulkan/", + }) + files({ + "../window_demo.cc", + "vulkan_window_demo.cc", + project_root.."/src/xenia/base/main_"..platform_suffix..".cc", + }) + resincludedirs({ + project_root, + }) diff --git a/src/xenia/ui/vulkan/shaders/build.bat b/src/xenia/ui/vulkan/shaders/build.bat new file mode 100644 index 000000000..c3e0322b0 --- /dev/null +++ b/src/xenia/ui/vulkan/shaders/build.bat @@ -0,0 +1,2 @@ +glslangValidator -V immediate.vert -o immediate.vert.spv +glslangValidator -V immediate.frag -o immediate.frag.spv diff --git a/src/xenia/ui/vulkan/shaders/immediate.frag b/src/xenia/ui/vulkan/shaders/immediate.frag new file mode 100644 index 000000000..b5fcdda35 --- /dev/null +++ b/src/xenia/ui/vulkan/shaders/immediate.frag @@ -0,0 +1,23 @@ +#version 450 core +precision highp float; + +layout(push_constant) uniform PushConstants { + mat4 projection_matrix; + int restrict_texture_samples; +} push_constants; + +layout(set = 0, binding = 0) uniform sampler2D texture_sampler; + +layout(location = 0) in vec2 vtx_uv; +layout(location = 1) in vec4 vtx_color; + +layout(location = 0) out vec4 out_color; + +void main() { + out_color = vtx_color; + if (push_constants.restrict_texture_samples == 0 || vtx_uv.x <= 1.0) { + vec4 tex_color = texture(texture_sampler, vtx_uv); + out_color *= tex_color; + // TODO(benvanik): microprofiler shadows. 
+ } +} diff --git a/src/xenia/ui/vulkan/shaders/immediate.frag.h b/src/xenia/ui/vulkan/shaders/immediate.frag.h new file mode 100644 index 000000000..e1efd613a --- /dev/null +++ b/src/xenia/ui/vulkan/shaders/immediate.frag.h @@ -0,0 +1,124 @@ +const uint8_t immediate_frag_spv[] = { + 0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x08, 0x00, + 0x35, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x06, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x47, 0x4C, 0x53, 0x4C, 0x2E, 0x73, 0x74, 0x64, 0x2E, 0x34, 0x35, 0x30, + 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x08, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x69, 0x6E, 0x00, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x03, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00, 0xC2, 0x01, 0x00, 0x00, + 0x05, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x69, 0x6E, + 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, 0x09, 0x00, 0x00, 0x00, + 0x6F, 0x75, 0x74, 0x5F, 0x63, 0x6F, 0x6C, 0x6F, 0x72, 0x00, 0x00, 0x00, + 0x05, 0x00, 0x05, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x76, 0x74, 0x78, 0x5F, + 0x63, 0x6F, 0x6C, 0x6F, 0x72, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x50, 0x75, 0x73, 0x68, 0x43, 0x6F, 0x6E, 0x73, + 0x74, 0x61, 0x6E, 0x74, 0x73, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x72, 0x6F, 0x6A, + 0x65, 0x63, 0x74, 0x69, 0x6F, 0x6E, 0x5F, 0x6D, 0x61, 0x74, 0x72, 0x69, + 0x78, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x74, 0x72, 0x69, 0x63, 0x74, + 0x5F, 0x74, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x5F, 0x73, 0x61, 0x6D, + 0x70, 0x6C, 0x65, 0x73, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x12, 0x00, 
0x00, 0x00, 0x70, 0x75, 0x73, 0x68, 0x5F, 0x63, 0x6F, 0x6E, + 0x73, 0x74, 0x61, 0x6E, 0x74, 0x73, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00, + 0x1E, 0x00, 0x00, 0x00, 0x76, 0x74, 0x78, 0x5F, 0x75, 0x76, 0x00, 0x00, + 0x05, 0x00, 0x05, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x74, 0x65, 0x78, 0x5F, + 0x63, 0x6F, 0x6C, 0x6F, 0x72, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x2E, 0x00, 0x00, 0x00, 0x74, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x5F, + 0x73, 0x61, 0x6D, 0x70, 0x6C, 0x65, 0x72, 0x00, 0x47, 0x00, 0x04, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x47, 0x00, 0x04, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x48, 0x00, 0x05, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x23, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, + 0x12, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x47, 0x00, 0x04, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x2E, 0x00, 0x00, 0x00, + 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, + 0x2E, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x07, 0x00, 
0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, + 0x0A, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x3B, 0x00, 0x04, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x02, 0x00, 0x0D, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x04, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, 0x0F, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x04, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x11, 0x00, 0x00, 0x00, + 0x12, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00, + 0x0F, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x14, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, + 0x0F, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00, 0x0F, 0x00, 0x00, 0x00, + 0x17, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, + 0x1C, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x1D, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x1C, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x1D, 0x00, 0x00, 0x00, + 0x1E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, + 0x1F, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x2B, 0x00, 0x04, 0x00, 0x1F, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x21, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3F, + 0x20, 0x00, 0x04, 0x00, 0x29, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x19, 0x00, 0x09, 0x00, 0x2B, 0x00, 0x00, 0x00, + 0x06, 0x00, 
0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x1B, 0x00, 0x03, 0x00, 0x2C, 0x00, 0x00, 0x00, + 0x2B, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x2D, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, + 0x2D, 0x00, 0x00, 0x00, 0x2E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, + 0x05, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x29, 0x00, 0x00, 0x00, + 0x2A, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, + 0x3E, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, + 0x41, 0x00, 0x05, 0x00, 0x14, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, + 0x12, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x0F, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, + 0xAA, 0x00, 0x05, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, + 0x16, 0x00, 0x00, 0x00, 0x17, 0x00, 0x00, 0x00, 0xA8, 0x00, 0x04, 0x00, + 0x0D, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, + 0xF7, 0x00, 0x03, 0x00, 0x1B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0xFA, 0x00, 0x04, 0x00, 0x19, 0x00, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00, + 0x1B, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, 0x1A, 0x00, 0x00, 0x00, + 0x41, 0x00, 0x05, 0x00, 0x21, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, + 0x1E, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, + 0xBC, 0x00, 0x05, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, + 0x23, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0xF9, 0x00, 0x02, 0x00, + 0x1B, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, 0x1B, 0x00, 0x00, 0x00, + 0xF5, 0x00, 
0x07, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x26, 0x00, 0x00, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, + 0x1A, 0x00, 0x00, 0x00, 0xF7, 0x00, 0x03, 0x00, 0x28, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0xFA, 0x00, 0x04, 0x00, 0x26, 0x00, 0x00, 0x00, + 0x27, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, + 0x27, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, 0x2C, 0x00, 0x00, 0x00, + 0x2F, 0x00, 0x00, 0x00, 0x2E, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x1C, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x57, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00, + 0x2F, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x3E, 0x00, 0x03, 0x00, + 0x2A, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x32, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, + 0x3D, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x33, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x85, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x34, 0x00, 0x00, 0x00, 0x33, 0x00, 0x00, 0x00, 0x32, 0x00, 0x00, 0x00, + 0x3E, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00, 0x34, 0x00, 0x00, 0x00, + 0xF9, 0x00, 0x02, 0x00, 0x28, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, + 0x28, 0x00, 0x00, 0x00, 0xFD, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00, +}; diff --git a/src/xenia/ui/vulkan/shaders/immediate.frag.spv b/src/xenia/ui/vulkan/shaders/immediate.frag.spv new file mode 100644 index 000000000..9e0e6bec7 Binary files /dev/null and b/src/xenia/ui/vulkan/shaders/immediate.frag.spv differ diff --git a/src/xenia/ui/vulkan/shaders/immediate.vert b/src/xenia/ui/vulkan/shaders/immediate.vert new file mode 100644 index 000000000..732553dcf --- /dev/null +++ b/src/xenia/ui/vulkan/shaders/immediate.vert @@ -0,0 +1,21 @@ +#version 450 core +precision highp float; + +layout(push_constant) uniform PushConstants { + mat4 projection_matrix; + int restrict_texture_samples; +} push_constants; + +layout(location = 0) in vec2 in_pos; 
+layout(location = 1) in vec2 in_uv; +layout(location = 2) in vec4 in_color; + +layout(location = 0) out vec2 vtx_uv; +layout(location = 1) out vec4 vtx_color; + +void main() { + gl_Position = push_constants.projection_matrix * vec4(in_pos.xy, 0.0, 1.0); + gl_Position.y = -gl_Position.y; + vtx_uv = in_uv; + vtx_color = in_color; +} diff --git a/src/xenia/ui/vulkan/shaders/immediate.vert.h b/src/xenia/ui/vulkan/shaders/immediate.vert.h new file mode 100644 index 000000000..b454eb260 --- /dev/null +++ b/src/xenia/ui/vulkan/shaders/immediate.vert.h @@ -0,0 +1,136 @@ +const uint8_t immediate_vert_spv[] = { + 0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x08, 0x00, + 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x11, 0x00, 0x02, 0x00, 0x21, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x06, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x47, 0x4C, 0x53, 0x4C, 0x2E, 0x73, 0x74, 0x64, + 0x2E, 0x34, 0x35, 0x30, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x03, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x0B, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x69, 0x6E, + 0x00, 0x00, 0x00, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, + 0x29, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, + 0x2E, 0x00, 0x00, 0x00, 0x03, 0x00, 0x03, 0x00, 0x02, 0x00, 0x00, 0x00, + 0xC2, 0x01, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x6D, 0x61, 0x69, 0x6E, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x67, 0x6C, 0x5F, 0x50, 0x65, 0x72, 0x56, 0x65, + 0x72, 0x74, 0x65, 0x78, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x06, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x67, 0x6C, 0x5F, 0x50, + 0x6F, 0x73, 0x69, 0x74, 0x69, 0x6F, 0x6E, 0x00, 0x06, 0x00, 0x07, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x67, 0x6C, 0x5F, 0x50, + 0x6F, 0x69, 0x6E, 0x74, 0x53, 0x69, 0x7A, 0x65, 0x00, 
0x00, 0x00, 0x00, + 0x06, 0x00, 0x07, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x67, 0x6C, 0x5F, 0x43, 0x6C, 0x69, 0x70, 0x44, 0x69, 0x73, 0x74, 0x61, + 0x6E, 0x63, 0x65, 0x00, 0x06, 0x00, 0x07, 0x00, 0x0B, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x67, 0x6C, 0x5F, 0x43, 0x75, 0x6C, 0x6C, 0x44, + 0x69, 0x73, 0x74, 0x61, 0x6E, 0x63, 0x65, 0x00, 0x05, 0x00, 0x03, 0x00, + 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x50, 0x75, 0x73, 0x68, 0x43, 0x6F, 0x6E, 0x73, + 0x74, 0x61, 0x6E, 0x74, 0x73, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70, 0x72, 0x6F, 0x6A, + 0x65, 0x63, 0x74, 0x69, 0x6F, 0x6E, 0x5F, 0x6D, 0x61, 0x74, 0x72, 0x69, + 0x78, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x11, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x72, 0x65, 0x73, 0x74, 0x72, 0x69, 0x63, 0x74, + 0x5F, 0x74, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x5F, 0x73, 0x61, 0x6D, + 0x70, 0x6C, 0x65, 0x73, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, + 0x13, 0x00, 0x00, 0x00, 0x70, 0x75, 0x73, 0x68, 0x5F, 0x63, 0x6F, 0x6E, + 0x73, 0x74, 0x61, 0x6E, 0x74, 0x73, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00, + 0x19, 0x00, 0x00, 0x00, 0x69, 0x6E, 0x5F, 0x70, 0x6F, 0x73, 0x00, 0x00, + 0x05, 0x00, 0x04, 0x00, 0x29, 0x00, 0x00, 0x00, 0x76, 0x74, 0x78, 0x5F, + 0x75, 0x76, 0x00, 0x00, 0x05, 0x00, 0x04, 0x00, 0x2A, 0x00, 0x00, 0x00, + 0x69, 0x6E, 0x5F, 0x75, 0x76, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, + 0x2C, 0x00, 0x00, 0x00, 0x76, 0x74, 0x78, 0x5F, 0x63, 0x6F, 0x6C, 0x6F, + 0x72, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, 0x2E, 0x00, 0x00, 0x00, + 0x69, 0x6E, 0x5F, 0x63, 0x6F, 0x6C, 0x6F, 0x72, 0x00, 0x00, 0x00, 0x00, + 0x48, 0x00, 0x05, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x0B, 
0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x48, 0x00, 0x05, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x48, 0x00, 0x04, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, + 0x48, 0x00, 0x05, 0x00, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x23, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x00, 0x00, 0x48, 0x00, 0x05, 0x00, 0x11, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, + 0x47, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x47, 0x00, 0x04, 0x00, 0x13, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x19, 0x00, 0x00, 0x00, + 0x1E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, + 0x29, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x47, 0x00, 0x04, 0x00, 0x2A, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x2C, 0x00, 0x00, 0x00, + 0x1E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, + 0x2E, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x15, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, + 0x09, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x04, 0x00, + 0x0A, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x09, 
0x00, 0x00, 0x00, + 0x1E, 0x00, 0x06, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x0B, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x0C, 0x00, 0x00, 0x00, + 0x0D, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x15, 0x00, 0x04, 0x00, + 0x0E, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x2B, 0x00, 0x04, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x18, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x04, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x12, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, + 0x11, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x12, 0x00, 0x00, 0x00, + 0x13, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x17, 0x00, 0x04, 0x00, 0x17, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, + 0x02, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x18, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x17, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, + 0x18, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x2B, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x1B, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, + 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3F, 0x20, 0x00, 0x04, 0x00, + 0x21, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x20, 0x00, 0x04, 0x00, 0x23, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x28, 0x00, 0x00, 0x00, + 0x03, 0x00, 0x00, 0x00, 0x17, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, + 0x28, 0x00, 0x00, 0x00, 0x29, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0x3B, 0x00, 0x04, 0x00, 0x18, 0x00, 0x00, 0x00, 0x2A, 
0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x21, 0x00, 0x00, 0x00, + 0x2C, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, + 0x2D, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x3B, 0x00, 0x04, 0x00, 0x2D, 0x00, 0x00, 0x00, 0x2E, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, + 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, + 0xF8, 0x00, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00, 0x41, 0x00, 0x05, 0x00, + 0x14, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x13, 0x00, 0x00, 0x00, + 0x0F, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00, + 0x16, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x17, 0x00, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00, 0x19, 0x00, 0x00, 0x00, + 0x51, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x1D, 0x00, 0x00, 0x00, + 0x1A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x51, 0x00, 0x05, 0x00, + 0x06, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x50, 0x00, 0x07, 0x00, 0x07, 0x00, 0x00, 0x00, + 0x1F, 0x00, 0x00, 0x00, 0x1D, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, + 0x1B, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x91, 0x00, 0x05, 0x00, + 0x07, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, + 0x1F, 0x00, 0x00, 0x00, 0x41, 0x00, 0x05, 0x00, 0x21, 0x00, 0x00, 0x00, + 0x22, 0x00, 0x00, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, + 0x3E, 0x00, 0x03, 0x00, 0x22, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, + 0x41, 0x00, 0x06, 0x00, 0x23, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, + 0x0D, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, + 0x3D, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, + 0x24, 0x00, 0x00, 0x00, 0x7F, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, + 0x26, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, 0x41, 0x00, 0x06, 0x00, + 0x23, 0x00, 0x00, 0x00, 0x27, 0x00, 0x00, 0x00, 0x0D, 
0x00, 0x00, 0x00, + 0x0F, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x3E, 0x00, 0x03, 0x00, + 0x27, 0x00, 0x00, 0x00, 0x26, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, + 0x17, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x00, 0x00, 0x2A, 0x00, 0x00, 0x00, + 0x3E, 0x00, 0x03, 0x00, 0x29, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x00, 0x00, + 0x3D, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x2F, 0x00, 0x00, 0x00, + 0x2E, 0x00, 0x00, 0x00, 0x3E, 0x00, 0x03, 0x00, 0x2C, 0x00, 0x00, 0x00, + 0x2F, 0x00, 0x00, 0x00, 0xFD, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00, +}; diff --git a/src/xenia/ui/vulkan/shaders/immediate.vert.spv b/src/xenia/ui/vulkan/shaders/immediate.vert.spv new file mode 100644 index 000000000..581d87bc6 Binary files /dev/null and b/src/xenia/ui/vulkan/shaders/immediate.vert.spv differ diff --git a/src/xenia/ui/vulkan/vulkan.cc b/src/xenia/ui/vulkan/vulkan.cc new file mode 100644 index 000000000..ba889e109 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan.cc @@ -0,0 +1,12 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. * + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan.h" + +DEFINE_bool(vulkan_validation, false, "Enable Vulkan validation layers."); diff --git a/src/xenia/ui/vulkan/vulkan.h b/src/xenia/ui/vulkan/vulkan.h new file mode 100644 index 000000000..697c9fa57 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan.h @@ -0,0 +1,34 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. 
* + * Released under the BSD license - see LICENSE in the root for more details. * + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_H_ +#define XENIA_UI_VULKAN_VULKAN_H_ + +#include + +#include "xenia/base/platform.h" + +#if XE_PLATFORM_WIN32 +#define VK_USE_PLATFORM_WIN32_KHR 1 +#else +#error Platform not yet supported. +#endif // XE_PLATFORM_WIN32 + +// We are statically linked with the loader, so use function prototypes. +#define VK_PROTOTYPES +#include "third_party/vulkan/vulkan.h" + +// NOTE: header order matters here, unfortunately: +#include "third_party/vulkan/vk_lunarg_debug_marker.h" + +#define XELOGVK XELOGI + +DECLARE_bool(vulkan_validation); + +#endif // XENIA_UI_VULKAN_VULKAN_H_ diff --git a/src/xenia/ui/vulkan/vulkan_context.cc b/src/xenia/ui/vulkan/vulkan_context.cc new file mode 100644 index 000000000..5d82f4f46 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_context.cc @@ -0,0 +1,148 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_context.h" + +#include +#include + +#include "xenia/base/assert.h" +#include "xenia/base/logging.h" +#include "xenia/base/math.h" +#include "xenia/base/profiling.h" +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_device.h" +#include "xenia/ui/vulkan/vulkan_immediate_drawer.h" +#include "xenia/ui/vulkan/vulkan_instance.h" +#include "xenia/ui/vulkan/vulkan_provider.h" +#include "xenia/ui/vulkan/vulkan_swap_chain.h" +#include "xenia/ui/vulkan/vulkan_util.h" +#include "xenia/ui/window.h" + +namespace xe { +namespace ui { +namespace vulkan { + +VulkanContext::VulkanContext(VulkanProvider* provider, Window* target_window) + : GraphicsContext(provider, target_window) {} + +VulkanContext::~VulkanContext() { + auto provider = static_cast(provider_); + auto device = provider->device(); + vkQueueWaitIdle(device->primary_queue()); + immediate_drawer_.reset(); + swap_chain_.reset(); + if (cmd_pool_) { + vkDestroyCommandPool(*device, cmd_pool_, nullptr); + } +} + +bool VulkanContext::Initialize() { + auto provider = static_cast(provider_); + auto device = provider->device(); + + // All context-specific commands will be allocated from this. + // We may want to have additional pools for different rendering subsystems. + VkCommandPoolCreateInfo cmd_pool_info; + cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + cmd_pool_info.pNext = nullptr; + cmd_pool_info.queueFamilyIndex = device->queue_family_index(); + cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT; + auto err = vkCreateCommandPool(*device, &cmd_pool_info, nullptr, &cmd_pool_); + CheckResult(err, "vkCreateCommandPool"); + + if (target_window_) { + // Create swap chain used to present to the window. 
+ VkSurfaceKHR surface = nullptr; +#if XE_PLATFORM_WIN32 + VkWin32SurfaceCreateInfoKHR create_info; + create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; + create_info.pNext = nullptr; + create_info.flags = 0; + create_info.hinstance = + static_cast(target_window_->native_platform_handle()); + create_info.hwnd = static_cast(target_window_->native_handle()); + err = vkCreateWin32SurfaceKHR(*provider->instance(), &create_info, nullptr, + &surface); + CheckResult(err, "vkCreateWin32SurfaceKHR"); +#else +#error Platform not yet implemented. +#endif // XE_PLATFORM_WIN32 + swap_chain_ = std::make_unique(provider->instance(), + provider->device()); + if (!swap_chain_->Initialize(surface)) { + XELOGE("Unable to initialize swap chain"); + vkDestroySurfaceKHR(*provider->instance(), surface, nullptr); + return false; + } + + // Only initialize immediate mode drawer if we are not an offscreen context. + immediate_drawer_ = std::make_unique(this); + } + + return true; +} + +ImmediateDrawer* VulkanContext::immediate_drawer() { + return immediate_drawer_.get(); +} + +VulkanInstance* VulkanContext::instance() const { + return static_cast(provider_)->instance(); +} + +VulkanDevice* VulkanContext::device() const { + return static_cast(provider_)->device(); +} + +bool VulkanContext::is_current() { return false; } + +bool VulkanContext::MakeCurrent() { + SCOPE_profile_cpu_f("gpu"); + return true; +} + +void VulkanContext::ClearCurrent() {} + +void VulkanContext::BeginSwap() { + SCOPE_profile_cpu_f("gpu"); + auto provider = static_cast(provider_); + auto device = provider->device(); + + // Acquire the next image and set it up for use. + swap_chain_->Begin(); + + // TODO(benvanik): use a fence instead? May not be possible with target image. 
+ auto err = vkQueueWaitIdle(device->primary_queue()); + CheckResult(err, "vkQueueWaitIdle"); +} + +void VulkanContext::EndSwap() { + SCOPE_profile_cpu_f("gpu"); + auto provider = static_cast(provider_); + auto device = provider->device(); + + // Notify the presentation engine the image is ready. + // The contents must be in a coherent state. + swap_chain_->End(); + + // Wait until the queue is idle. + // TODO(benvanik): is this required? + auto err = vkQueueWaitIdle(device->primary_queue()); + CheckResult(err, "vkQueueWaitIdle"); +} + +std::unique_ptr VulkanContext::Capture() { + assert_always(); + return nullptr; +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_context.h b/src/xenia/ui/vulkan/vulkan_context.h new file mode 100644 index 000000000..1893ca287 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_context.h @@ -0,0 +1,63 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_CONTEXT_H_ +#define XENIA_UI_VULKAN_VULKAN_CONTEXT_H_ + +#include + +#include "xenia/ui/graphics_context.h" +#include "xenia/ui/vulkan/vulkan.h" + +namespace xe { +namespace ui { +namespace vulkan { + +class VulkanDevice; +class VulkanImmediateDrawer; +class VulkanInstance; +class VulkanProvider; +class VulkanSwapChain; + +class VulkanContext : public GraphicsContext { + public: + ~VulkanContext() override; + + ImmediateDrawer* immediate_drawer() override; + VulkanSwapChain* swap_chain() const { return swap_chain_.get(); } + VulkanInstance* instance() const; + VulkanDevice* device() const; + + bool is_current() override; + bool MakeCurrent() override; + void ClearCurrent() override; + + void BeginSwap() override; + void EndSwap() override; + + std::unique_ptr Capture() override; + + private: + friend class VulkanProvider; + + explicit VulkanContext(VulkanProvider* provider, Window* target_window); + + private: + bool Initialize(); + + std::unique_ptr swap_chain_; + std::unique_ptr immediate_drawer_; + VkCommandPool cmd_pool_ = nullptr; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_CONTEXT_H_ diff --git a/src/xenia/ui/vulkan/vulkan_device.cc b/src/xenia/ui/vulkan/vulkan_device.cc new file mode 100644 index 000000000..8f862f444 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_device.cc @@ -0,0 +1,222 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_device.h" + +#include + +#include +#include +#include + +#include "xenia/base/assert.h" +#include "xenia/base/logging.h" +#include "xenia/base/math.h" +#include "xenia/base/profiling.h" +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_immediate_drawer.h" +#include "xenia/ui/vulkan/vulkan_util.h" +#include "xenia/ui/window.h" + +namespace xe { +namespace ui { +namespace vulkan { + +VulkanDevice::VulkanDevice(VulkanInstance* instance) : instance_(instance) { + if (FLAGS_vulkan_validation) { + /*DeclareRequiredLayer("VK_LAYER_GOOGLE_unique_objects", + Version::Make(0, 0, 0), true);*/ + DeclareRequiredLayer("VK_LAYER_LUNARG_threading", Version::Make(0, 0, 0), + true); + /*DeclareRequiredLayer("VK_LAYER_LUNARG_mem_tracker", Version::Make(0, 0, + 0), + true);*/ + DeclareRequiredLayer("VK_LAYER_LUNARG_object_tracker", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_draw_state", Version::Make(0, 0, 0), + true); + DeclareRequiredLayer("VK_LAYER_LUNARG_param_checker", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_swapchain", Version::Make(0, 0, 0), + true); + DeclareRequiredLayer("VK_LAYER_LUNARG_device_limits", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_image", Version::Make(0, 0, 0), true); + } +} + +VulkanDevice::~VulkanDevice() { + if (handle) { + vkDestroyDevice(handle, nullptr); + handle = nullptr; + } +} + +bool VulkanDevice::Initialize(DeviceInfo device_info) { + // Gather list of enabled layer names. + auto layers_result = CheckRequirements(required_layers_, device_info.layers); + auto& enabled_layers = layers_result.second; + + // Gather list of enabled extension names. 
+ auto extensions_result = + CheckRequirements(required_extensions_, device_info.extensions); + auto& enabled_extensions = extensions_result.second; + + // We wait until both extensions and layers are checked before failing out so + // that the user gets a complete list of what they have/don't. + if (!extensions_result.first || !layers_result.first) { + FatalVulkanError( + "Layer and extension verification failed; aborting initialization"); + return false; + } + + // Query supported features so we can make sure we have what we need. + VkPhysicalDeviceFeatures supported_features; + vkGetPhysicalDeviceFeatures(device_info.handle, &supported_features); + VkPhysicalDeviceFeatures enabled_features = {0}; + bool any_features_missing = false; +#define ENABLE_AND_EXPECT(name) \ + if (!supported_features.name) { \ + any_features_missing = true; \ + FatalVulkanError("Vulkan device is missing feature " #name); \ + } else { \ + enabled_features.name = VK_TRUE; \ + } + ENABLE_AND_EXPECT(geometryShader); + ENABLE_AND_EXPECT(depthClamp); + ENABLE_AND_EXPECT(alphaToOne); + ENABLE_AND_EXPECT(multiViewport); + // TODO(benvanik): add other features. + if (any_features_missing) { + XELOGE( + "One or more required device features are missing; aborting " + "initialization"); + return false; + } + + // Pick a queue. + // Any queue we use must support both graphics and presentation. + // TODO(benvanik): use multiple queues (DMA-only, compute-only, etc). + if (device_info.queue_family_properties.empty()) { + FatalVulkanError("No queue families available"); + return false; + } + uint32_t ideal_queue_family_index = UINT_MAX; + uint32_t queue_count = 1; + for (size_t i = 0; i < device_info.queue_family_properties.size(); ++i) { + auto queue_flags = device_info.queue_family_properties[i].queueFlags; + if (!device_info.queue_family_supports_present[i]) { + // Can't present from this queue, so ignore it. 
+ continue; + } + if (queue_flags & VK_QUEUE_GRAPHICS_BIT) { + // Can do graphics and present - good! + ideal_queue_family_index = static_cast(i); + // TODO(benvanik): pick a higher queue count? + queue_count = 1; + break; + } + } + if (ideal_queue_family_index == UINT_MAX) { + FatalVulkanError( + "No queue families available that can both do graphics and present"); + return false; + } + + VkDeviceQueueCreateInfo queue_info; + queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_info.pNext = nullptr; + queue_info.flags = 0; + queue_info.queueFamilyIndex = ideal_queue_family_index; + queue_info.queueCount = queue_count; + std::vector queue_priorities(queue_count); + queue_info.pQueuePriorities = queue_priorities.data(); + + VkDeviceCreateInfo create_info; + create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + create_info.pNext = nullptr; + create_info.flags = 0; + create_info.queueCreateInfoCount = 1; + create_info.pQueueCreateInfos = &queue_info; + create_info.enabledLayerCount = static_cast(enabled_layers.size()); + create_info.ppEnabledLayerNames = enabled_layers.data(); + create_info.enabledExtensionCount = + static_cast(enabled_extensions.size()); + create_info.ppEnabledExtensionNames = enabled_extensions.data(); + create_info.pEnabledFeatures = &enabled_features; + + auto err = vkCreateDevice(device_info.handle, &create_info, nullptr, &handle); + switch (err) { + case VK_SUCCESS: + // Ok! 
+ break; + case VK_ERROR_INITIALIZATION_FAILED: + FatalVulkanError("Device initialization failed; generic"); + return false; + case VK_ERROR_EXTENSION_NOT_PRESENT: + FatalVulkanError( + "Device initialization failed; requested extension not present"); + return false; + case VK_ERROR_LAYER_NOT_PRESENT: + FatalVulkanError( + "Device initialization failed; requested layer not present"); + return false; + default: + FatalVulkanError(std::string("Device initialization failed; unknown: ") + + to_string(err)); + return false; + } + + device_info_ = std::move(device_info); + queue_family_index_ = ideal_queue_family_index; + + // Get the primary queue used for most submissions/etc. + vkGetDeviceQueue(handle, queue_family_index_, 0, &primary_queue_); + + XELOGVK("Device initialized successfully!"); + return true; +} + +VkDeviceMemory VulkanDevice::AllocateMemory( + const VkMemoryRequirements& requirements, VkFlags required_properties) { + // Search memory types to find one matching our requirements and our + // properties. + uint32_t type_index = UINT_MAX; + for (uint32_t i = 0; i < device_info_.memory_properties.memoryTypeCount; + ++i) { + const auto& memory_type = device_info_.memory_properties.memoryTypes[i]; + if (((requirements.memoryTypeBits >> i) & 1) == 1) { + // Type is available for use; check for a match on properties. + if ((memory_type.propertyFlags & required_properties) == + required_properties) { + type_index = i; + break; + } + } + } + if (type_index == UINT_MAX) { + XELOGE("Unable to find a matching memory type"); + return nullptr; + } + + // Allocate the memory. 
+ VkMemoryAllocateInfo memory_info; + memory_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; + memory_info.pNext = nullptr; + memory_info.allocationSize = requirements.size; + memory_info.memoryTypeIndex = type_index; + VkDeviceMemory memory = nullptr; + auto err = vkAllocateMemory(handle, &memory_info, nullptr, &memory); + CheckResult(err, "vkAllocateMemory"); + return memory; +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_device.h b/src/xenia/ui/vulkan/vulkan_device.h new file mode 100644 index 000000000..f1194d662 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_device.h @@ -0,0 +1,83 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2014 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. * + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_DEVICE_H_ +#define XENIA_UI_VULKAN_VULKAN_DEVICE_H_ + +#include +#include +#include + +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_util.h" + +namespace xe { +namespace ui { +namespace vulkan { + +class VulkanInstance; + +// Wrapper and utilities for VkDevice. +// Prefer passing this around over a VkDevice and casting as needed to call +// APIs. +class VulkanDevice { + public: + VulkanDevice(VulkanInstance* instance); + ~VulkanDevice(); + + VkDevice handle = nullptr; + + operator VkDevice() const { return handle; } + operator VkPhysicalDevice() const { return device_info_.handle; } + + // Declares a layer to verify and enable upon initialization. + // Must be called before Initialize. 
+ void DeclareRequiredLayer(std::string name, uint32_t min_version, + bool is_optional) { + required_layers_.push_back({name, min_version, is_optional}); + } + + // Declares an extension to verify and enable upon initialization. + // Must be called before Initialize. + void DeclareRequiredExtension(std::string name, uint32_t min_version, + bool is_optional) { + required_extensions_.push_back({name, min_version, is_optional}); + } + + // Initializes the device, querying and enabling extensions and layers and + // preparing the device for general use. + // If initialization succeeds it's likely that no more failures beyond runtime + // issues will occur. + bool Initialize(DeviceInfo device_info); + + uint32_t queue_family_index() const { return queue_family_index_; } + VkQueue primary_queue() const { return primary_queue_; } + const DeviceInfo& device_info() const { return device_info_; } + + // Allocates memory of the given size matching the required properties. + VkDeviceMemory AllocateMemory( + const VkMemoryRequirements& requirements, + VkFlags required_properties = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT); + + private: + VulkanInstance* instance_ = nullptr; + + std::vector required_layers_; + std::vector required_extensions_; + + DeviceInfo device_info_; + uint32_t queue_family_index_ = 0; + VkQueue primary_queue_ = nullptr; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_DEVICE_H_ diff --git a/src/xenia/ui/vulkan/vulkan_immediate_drawer.cc b/src/xenia/ui/vulkan/vulkan_immediate_drawer.cc new file mode 100644 index 000000000..97b31de98 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_immediate_drawer.cc @@ -0,0 +1,734 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. 
* + * Released under the BSD license - see LICENSE in the root for more details. * + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_immediate_drawer.h" + +#include "xenia/base/assert.h" +#include "xenia/base/math.h" +#include "xenia/ui/graphics_context.h" +#include "xenia/ui/vulkan/vulkan_context.h" +#include "xenia/ui/vulkan/vulkan_device.h" +#include "xenia/ui/vulkan/vulkan_swap_chain.h" + +namespace xe { +namespace ui { +namespace vulkan { + +#include "xenia/ui/vulkan/shaders/immediate.frag.h" +#include "xenia/ui/vulkan/shaders/immediate.vert.h" + +constexpr uint32_t kCircularBufferCapacity = 2 * 1024 * 1024; + +class LightweightCircularBuffer { + public: + LightweightCircularBuffer(VulkanDevice* device) : device_(*device) { + buffer_capacity_ = xe::round_up(kCircularBufferCapacity, 4096); + + // Index buffer. + VkBufferCreateInfo index_buffer_info; + index_buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + index_buffer_info.pNext = nullptr; + index_buffer_info.flags = 0; + index_buffer_info.size = buffer_capacity_; + index_buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT; + index_buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + index_buffer_info.queueFamilyIndexCount = 0; + index_buffer_info.pQueueFamilyIndices = nullptr; + auto err = + vkCreateBuffer(device_, &index_buffer_info, nullptr, &index_buffer_); + CheckResult(err, "vkCreateBuffer"); + + // Vertex buffer. 
+ VkBufferCreateInfo vertex_buffer_info; + vertex_buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + vertex_buffer_info.pNext = nullptr; + vertex_buffer_info.flags = 0; + vertex_buffer_info.size = buffer_capacity_; + vertex_buffer_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT; + vertex_buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + vertex_buffer_info.queueFamilyIndexCount = 0; + vertex_buffer_info.pQueueFamilyIndices = nullptr; + err = + vkCreateBuffer(*device, &vertex_buffer_info, nullptr, &vertex_buffer_); + CheckResult(err, "vkCreateBuffer"); + + // Allocate underlying buffer. + // We alias it for both vertices and indices. + VkMemoryRequirements buffer_requirements; + vkGetBufferMemoryRequirements(device_, index_buffer_, &buffer_requirements); + buffer_memory_ = device->AllocateMemory( + buffer_requirements, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT); + vkBindBufferMemory(*device, index_buffer_, buffer_memory_, 0); + vkBindBufferMemory(*device, vertex_buffer_, buffer_memory_, 0); + + // Persistent mapping. + err = vkMapMemory(device_, buffer_memory_, 0, VK_WHOLE_SIZE, 0, + &buffer_data_); + CheckResult(err, "vkMapMemory"); + } + + ~LightweightCircularBuffer() { + vkUnmapMemory(device_, buffer_memory_); + vkDestroyBuffer(device_, index_buffer_, nullptr); + vkDestroyBuffer(device_, vertex_buffer_, nullptr); + vkFreeMemory(device_, buffer_memory_, nullptr); + } + + VkBuffer vertex_buffer() const { return vertex_buffer_; } + VkBuffer index_buffer() const { return index_buffer_; } + + // Allocates space for data and copies it into the buffer. + // Returns the offset in the buffer of the data or VK_WHOLE_SIZE if the buffer + // is full. + VkDeviceSize Emplace(const void* source_data, size_t source_length) { + // TODO(benvanik): query actual alignment. + source_length = xe::round_up(source_length, 256); + + // Run down old fences to free up space. + + // Check to see if we have space. + // return VK_WHOLE_SIZE; + + // Compute new range and mark as in use. 
+ if (current_offset_ + source_length > buffer_capacity_) { + // Wraps around. + current_offset_ = 0; + } + VkDeviceSize offset = current_offset_; + current_offset_ += source_length; + + // Copy data. + auto dest_ptr = reinterpret_cast(buffer_data_) + offset; + std::memcpy(dest_ptr, source_data, source_length); + + // Insert fence. + // TODO(benvanik): coarse-grained fences, these may be too fine. + + // Flush memory. + // TODO(benvanik): do only in large batches? can barrier it. + VkMappedMemoryRange dirty_range; + dirty_range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; + dirty_range.pNext = nullptr; + dirty_range.memory = buffer_memory_; + dirty_range.offset = offset; + dirty_range.size = source_length; + vkFlushMappedMemoryRanges(device_, 1, &dirty_range); + return offset; + } + + private: + VkDevice device_ = nullptr; + + VkBuffer index_buffer_ = nullptr; + VkBuffer vertex_buffer_ = nullptr; + VkDeviceMemory buffer_memory_ = nullptr; + void* buffer_data_ = nullptr; + size_t buffer_capacity_ = 0; + size_t current_offset_ = 0; +}; + +class VulkanImmediateTexture : public ImmediateTexture { + public: + VulkanImmediateTexture(VulkanDevice* device, VkDescriptorPool descriptor_pool, + VkDescriptorSetLayout descriptor_set_layout, + VkSampler sampler, uint32_t width, uint32_t height) + : ImmediateTexture(width, height), + device_(*device), + descriptor_pool_(descriptor_pool), + sampler_(sampler) { + handle = reinterpret_cast(this); + + // Create image object. 
+ VkImageCreateInfo image_info; + image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_info.pNext = nullptr; + image_info.flags = 0; + image_info.imageType = VK_IMAGE_TYPE_2D; + image_info.format = VK_FORMAT_R8G8B8A8_UNORM; + image_info.extent = {width, height, 1}; + image_info.mipLevels = 1; + image_info.arrayLayers = 1; + image_info.samples = VK_SAMPLE_COUNT_1_BIT; + image_info.tiling = VK_IMAGE_TILING_LINEAR; + image_info.usage = VK_IMAGE_USAGE_SAMPLED_BIT; + image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + image_info.queueFamilyIndexCount = 0; + image_info.pQueueFamilyIndices = nullptr; + image_info.initialLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; + auto err = vkCreateImage(device_, &image_info, nullptr, &image_); + CheckResult(err, "vkCreateImage"); + + // Allocate memory for the image. + VkMemoryRequirements memory_requirements; + vkGetImageMemoryRequirements(device_, image_, &memory_requirements); + device_memory_ = device->AllocateMemory( + memory_requirements, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT); + + // Bind memory and the image together. + err = vkBindImageMemory(device_, image_, device_memory_, 0); + CheckResult(err, "vkBindImageMemory"); + + // Create image view used by the shader. + VkImageViewCreateInfo view_info; + view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + view_info.pNext = nullptr; + view_info.flags = 0; + view_info.image = image_; + view_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + view_info.format = VK_FORMAT_R8G8B8A8_UNORM; + view_info.components = { + VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, + VK_COMPONENT_SWIZZLE_A, + }; + view_info.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}; + err = vkCreateImageView(device_, &view_info, nullptr, &image_view_); + CheckResult(err, "vkCreateImageView"); + + // Create descriptor set used just for this texture. + // It never changes, so we can reuse it and not worry with updates. 
+ VkDescriptorSetAllocateInfo set_alloc_info; + set_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + set_alloc_info.pNext = nullptr; + set_alloc_info.descriptorPool = descriptor_pool_; + set_alloc_info.descriptorSetCount = 1; + set_alloc_info.pSetLayouts = &descriptor_set_layout; + err = vkAllocateDescriptorSets(device_, &set_alloc_info, &descriptor_set_); + CheckResult(err, "vkAllocateDescriptorSets"); + + // Initialize descriptor with our texture. + VkDescriptorImageInfo texture_info; + texture_info.sampler = sampler_; + texture_info.imageView = image_view_; + texture_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL; + VkWriteDescriptorSet descriptor_write; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.pNext = nullptr; + descriptor_write.dstSet = descriptor_set_; + descriptor_write.dstBinding = 0; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorCount = 1; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + descriptor_write.pImageInfo = &texture_info; + vkUpdateDescriptorSets(device_, 1, &descriptor_write, 0, nullptr); + } + + ~VulkanImmediateTexture() override { + vkFreeDescriptorSets(device_, descriptor_pool_, 1, &descriptor_set_); + vkDestroyImageView(device_, image_view_, nullptr); + vkDestroyImage(device_, image_, nullptr); + vkFreeMemory(device_, device_memory_, nullptr); + } + + void Upload(const uint8_t* src_data) { + // TODO(benvanik): assert not in use? textures aren't dynamic right now. + + // Get device image layout. + VkImageSubresource subresource; + subresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + subresource.mipLevel = 0; + subresource.arrayLayer = 0; + VkSubresourceLayout layout; + vkGetImageSubresourceLayout(device_, image_, &subresource, &layout); + + // Map memory for upload. 
+ void* gpu_data = nullptr; + auto err = + vkMapMemory(device_, device_memory_, 0, layout.size, 0, &gpu_data); + CheckResult(err, "vkMapMemory"); + + // Copy the entire texture, hoping its layout matches what we expect. + std::memcpy(gpu_data, src_data, layout.size); + + vkUnmapMemory(device_, device_memory_); + } + + VkDescriptorSet descriptor_set() const { return descriptor_set_; } + + private: + VkDevice device_ = nullptr; + VkDescriptorPool descriptor_pool_ = nullptr; + VkSampler sampler_ = nullptr; // Not owned. + VkImage image_ = nullptr; + VkImageLayout image_layout_ = VK_IMAGE_LAYOUT_UNDEFINED; + VkDeviceMemory device_memory_ = nullptr; + VkImageView image_view_ = nullptr; + VkDescriptorSet descriptor_set_ = nullptr; +}; + +VulkanImmediateDrawer::VulkanImmediateDrawer(VulkanContext* graphics_context) + : ImmediateDrawer(graphics_context), context_(graphics_context) { + auto device = context_->device(); + + // NEAREST + CLAMP + VkSamplerCreateInfo sampler_info; + sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + sampler_info.pNext = nullptr; + sampler_info.magFilter = VK_FILTER_NEAREST; + sampler_info.minFilter = VK_FILTER_NEAREST; + sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST; + sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + sampler_info.mipLodBias = 0.0f; + sampler_info.anisotropyEnable = VK_FALSE; + sampler_info.maxAnisotropy = 1; + sampler_info.compareOp = VK_COMPARE_OP_NEVER; + sampler_info.minLod = 0.0f; + sampler_info.maxLod = 0.0f; + sampler_info.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE; + sampler_info.unnormalizedCoordinates = VK_FALSE; + auto err = vkCreateSampler(*device, &sampler_info, nullptr, + &samplers_.nearest_clamp); + CheckResult(err, "vkCreateSampler"); + + // NEAREST + REPEAT + sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT; + 
sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT; + sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT; + err = vkCreateSampler(*device, &sampler_info, nullptr, + &samplers_.nearest_repeat); + CheckResult(err, "vkCreateSampler"); + + // LINEAR + CLAMP + sampler_info.magFilter = VK_FILTER_LINEAR; + sampler_info.minFilter = VK_FILTER_LINEAR; + sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + err = + vkCreateSampler(*device, &sampler_info, nullptr, &samplers_.linear_clamp); + CheckResult(err, "vkCreateSampler"); + + // LINEAR + REPEAT + sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT; + sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT; + sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT; + err = vkCreateSampler(*device, &sampler_info, nullptr, + &samplers_.linear_repeat); + CheckResult(err, "vkCreateSampler"); + + // Create the descriptor set layout used for our texture sampler. + // As it changes almost every draw we keep it separate from the uniform buffer + // and cache it on the textures. 
+ VkDescriptorSetLayoutCreateInfo texture_set_layout_info; + texture_set_layout_info.sType = + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + texture_set_layout_info.pNext = nullptr; + texture_set_layout_info.flags = 0; + texture_set_layout_info.bindingCount = 1; + VkDescriptorSetLayoutBinding texture_binding; + texture_binding.binding = 0; + texture_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + texture_binding.descriptorCount = 1; + texture_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT; + texture_binding.pImmutableSamplers = nullptr; + texture_set_layout_info.pBindings = &texture_binding; + err = vkCreateDescriptorSetLayout(*device, &texture_set_layout_info, nullptr, + &texture_set_layout_); + CheckResult(err, "vkCreateDescriptorSetLayout"); + + // Descriptor pool used for all of our cached descriptors. + // In the steady state we don't allocate anything, so these are all manually + // managed. + VkDescriptorPoolCreateInfo descriptor_pool_info; + descriptor_pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; + descriptor_pool_info.pNext = nullptr; + descriptor_pool_info.flags = + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; + descriptor_pool_info.maxSets = 128; + VkDescriptorPoolSize pool_sizes[1]; + pool_sizes[0].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + pool_sizes[0].descriptorCount = 128; + descriptor_pool_info.poolSizeCount = 1; + descriptor_pool_info.pPoolSizes = pool_sizes; + err = vkCreateDescriptorPool(*device, &descriptor_pool_info, nullptr, + &descriptor_pool_); + CheckResult(err, "vkCreateDescriptorPool"); + + // Create the pipeline layout used for our pipeline. + // If we had multiple pipelines they would share this. 
+ VkPipelineLayoutCreateInfo pipeline_layout_info; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.pNext = nullptr; + pipeline_layout_info.flags = 0; + VkDescriptorSetLayout set_layouts[] = {texture_set_layout_}; + pipeline_layout_info.setLayoutCount = + static_cast(xe::countof(set_layouts)); + pipeline_layout_info.pSetLayouts = set_layouts; + VkPushConstantRange push_constant_ranges[2]; + push_constant_ranges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT; + push_constant_ranges[0].offset = 0; + push_constant_ranges[0].size = sizeof(float) * 16; + push_constant_ranges[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT; + push_constant_ranges[1].offset = sizeof(float) * 16; + push_constant_ranges[1].size = sizeof(int); + pipeline_layout_info.pushConstantRangeCount = + static_cast(xe::countof(push_constant_ranges)); + pipeline_layout_info.pPushConstantRanges = push_constant_ranges; + err = vkCreatePipelineLayout(*device, &pipeline_layout_info, nullptr, + &pipeline_layout_); + CheckResult(err, "vkCreatePipelineLayout"); + + // Vertex and fragment shaders. 
+ VkShaderModuleCreateInfo vertex_shader_info; + vertex_shader_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + vertex_shader_info.pNext = nullptr; + vertex_shader_info.flags = 0; + vertex_shader_info.codeSize = sizeof(immediate_vert_spv); + vertex_shader_info.pCode = + reinterpret_cast(immediate_vert_spv); + VkShaderModule vertex_shader; + err = vkCreateShaderModule(*device, &vertex_shader_info, nullptr, + &vertex_shader); + CheckResult(err, "vkCreateShaderModule"); + VkShaderModuleCreateInfo fragment_shader_info; + fragment_shader_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + fragment_shader_info.pNext = nullptr; + fragment_shader_info.flags = 0; + fragment_shader_info.codeSize = sizeof(immediate_frag_spv); + fragment_shader_info.pCode = + reinterpret_cast(immediate_frag_spv); + VkShaderModule fragment_shader; + err = vkCreateShaderModule(*device, &fragment_shader_info, nullptr, + &fragment_shader); + CheckResult(err, "vkCreateShaderModule"); + + // Pipeline used when rendering triangles. 
+ VkGraphicsPipelineCreateInfo pipeline_info; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.pNext = nullptr; + pipeline_info.flags = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT; + VkPipelineShaderStageCreateInfo pipeline_stages[2]; + pipeline_stages[0].sType = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + pipeline_stages[0].pNext = nullptr; + pipeline_stages[0].flags = 0; + pipeline_stages[0].stage = VK_SHADER_STAGE_VERTEX_BIT; + pipeline_stages[0].module = vertex_shader; + pipeline_stages[0].pName = "main"; + pipeline_stages[0].pSpecializationInfo = nullptr; + pipeline_stages[1].sType = + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + pipeline_stages[1].pNext = nullptr; + pipeline_stages[1].flags = 0; + pipeline_stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT; + pipeline_stages[1].module = fragment_shader; + pipeline_stages[1].pName = "main"; + pipeline_stages[1].pSpecializationInfo = nullptr; + pipeline_info.stageCount = 2; + pipeline_info.pStages = pipeline_stages; + VkPipelineVertexInputStateCreateInfo vertex_state_info; + vertex_state_info.sType = + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_state_info.pNext = nullptr; + VkVertexInputBindingDescription vertex_binding_descrs[1]; + vertex_binding_descrs[0].binding = 0; + vertex_binding_descrs[0].stride = sizeof(ImmediateVertex); + vertex_binding_descrs[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX; + vertex_state_info.vertexBindingDescriptionCount = + static_cast(xe::countof(vertex_binding_descrs)); + vertex_state_info.pVertexBindingDescriptions = vertex_binding_descrs; + VkVertexInputAttributeDescription vertex_attrib_descrs[3]; + vertex_attrib_descrs[0].location = 0; + vertex_attrib_descrs[0].binding = 0; + vertex_attrib_descrs[0].format = VK_FORMAT_R32G32_SFLOAT; + vertex_attrib_descrs[0].offset = offsetof(ImmediateVertex, x); + vertex_attrib_descrs[1].location = 1; + vertex_attrib_descrs[1].binding = 0; + 
vertex_attrib_descrs[1].format = VK_FORMAT_R32G32_SFLOAT; + vertex_attrib_descrs[1].offset = offsetof(ImmediateVertex, u); + vertex_attrib_descrs[2].location = 2; + vertex_attrib_descrs[2].binding = 0; + vertex_attrib_descrs[2].format = VK_FORMAT_R8G8B8A8_UNORM; + vertex_attrib_descrs[2].offset = offsetof(ImmediateVertex, color); + vertex_state_info.vertexAttributeDescriptionCount = + static_cast(xe::countof(vertex_attrib_descrs)); + vertex_state_info.pVertexAttributeDescriptions = vertex_attrib_descrs; + pipeline_info.pVertexInputState = &vertex_state_info; + VkPipelineInputAssemblyStateCreateInfo input_info; + input_info.sType = + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_info.pNext = nullptr; + input_info.flags = 0; + input_info.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_info.primitiveRestartEnable = VK_FALSE; + pipeline_info.pInputAssemblyState = &input_info; + pipeline_info.pTessellationState = nullptr; + VkPipelineViewportStateCreateInfo viewport_state_info; + viewport_state_info.sType = + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state_info.pNext = nullptr; + viewport_state_info.flags = 0; + viewport_state_info.viewportCount = 1; + viewport_state_info.pViewports = nullptr; + viewport_state_info.scissorCount = 1; + viewport_state_info.pScissors = nullptr; + pipeline_info.pViewportState = &viewport_state_info; + VkPipelineRasterizationStateCreateInfo rasterization_info; + rasterization_info.sType = + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + rasterization_info.pNext = nullptr; + rasterization_info.flags = 0; + rasterization_info.depthClampEnable = VK_FALSE; + rasterization_info.rasterizerDiscardEnable = VK_FALSE; + rasterization_info.polygonMode = VK_POLYGON_MODE_FILL; + rasterization_info.cullMode = VK_CULL_MODE_BACK_BIT; + rasterization_info.frontFace = VK_FRONT_FACE_CLOCKWISE; + rasterization_info.depthBiasEnable = VK_FALSE; + rasterization_info.depthBiasConstantFactor = 0; 
+ rasterization_info.depthBiasClamp = 0; + rasterization_info.depthBiasSlopeFactor = 0; + rasterization_info.lineWidth = 1.0f; + pipeline_info.pRasterizationState = &rasterization_info; + VkPipelineMultisampleStateCreateInfo multisample_info; + multisample_info.sType = + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisample_info.pNext = nullptr; + multisample_info.flags = 0; + multisample_info.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; + multisample_info.sampleShadingEnable = VK_FALSE; + multisample_info.minSampleShading = 0; + multisample_info.pSampleMask = nullptr; + multisample_info.alphaToCoverageEnable = VK_FALSE; + multisample_info.alphaToOneEnable = VK_FALSE; + pipeline_info.pMultisampleState = &multisample_info; + pipeline_info.pDepthStencilState = nullptr; + VkPipelineColorBlendStateCreateInfo blend_info; + blend_info.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + blend_info.pNext = nullptr; + blend_info.flags = 0; + blend_info.logicOpEnable = VK_FALSE; + blend_info.logicOp = VK_LOGIC_OP_NO_OP; + VkPipelineColorBlendAttachmentState blend_attachments[1]; + blend_attachments[0].blendEnable = VK_TRUE; + blend_attachments[0].srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + blend_attachments[0].dstColorBlendFactor = + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + blend_attachments[0].colorBlendOp = VK_BLEND_OP_ADD; + blend_attachments[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + blend_attachments[0].dstAlphaBlendFactor = + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + blend_attachments[0].alphaBlendOp = VK_BLEND_OP_ADD; + blend_attachments[0].colorWriteMask = 0xF; + blend_info.attachmentCount = + static_cast(xe::countof(blend_attachments)); + blend_info.pAttachments = blend_attachments; + std::memset(blend_info.blendConstants, 0, sizeof(blend_info.blendConstants)); + pipeline_info.pColorBlendState = &blend_info; + VkPipelineDynamicStateCreateInfo dynamic_state_info; + dynamic_state_info.sType = + 
VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_state_info.pNext = nullptr; + dynamic_state_info.flags = 0; + VkDynamicState dynamic_states[] = { + VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR, + }; + dynamic_state_info.dynamicStateCount = + static_cast(xe::countof(dynamic_states)); + dynamic_state_info.pDynamicStates = dynamic_states; + pipeline_info.pDynamicState = &dynamic_state_info; + pipeline_info.layout = pipeline_layout_; + pipeline_info.renderPass = context_->swap_chain()->render_pass(); + pipeline_info.subpass = 0; + pipeline_info.basePipelineHandle = nullptr; + pipeline_info.basePipelineIndex = 0; + err = vkCreateGraphicsPipelines(*device, nullptr, 1, &pipeline_info, nullptr, + &triangle_pipeline_); + CheckResult(err, "vkCreateGraphicsPipelines"); + + // Silly, but let's make a pipeline just for drawing lines. + pipeline_info.flags = VK_PIPELINE_CREATE_DERIVATIVE_BIT; + input_info.topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST; + pipeline_info.basePipelineHandle = triangle_pipeline_; + pipeline_info.basePipelineIndex = 0; + err = vkCreateGraphicsPipelines(*device, nullptr, 1, &pipeline_info, nullptr, + &line_pipeline_); + CheckResult(err, "vkCreateGraphicsPipelines"); + + vkDestroyShaderModule(*device, vertex_shader, nullptr); + vkDestroyShaderModule(*device, fragment_shader, nullptr); + + // Allocate the buffer we'll use for our vertex and index data. 
+ circular_buffer_ = std::make_unique(device); +} + +VulkanImmediateDrawer::~VulkanImmediateDrawer() { + auto device = context_->device(); + + circular_buffer_.reset(); + + vkDestroyPipeline(*device, line_pipeline_, nullptr); + vkDestroyPipeline(*device, triangle_pipeline_, nullptr); + vkDestroyPipelineLayout(*device, pipeline_layout_, nullptr); + + vkDestroyDescriptorPool(*device, descriptor_pool_, nullptr); + vkDestroyDescriptorSetLayout(*device, texture_set_layout_, nullptr); + + vkDestroySampler(*device, samplers_.nearest_clamp, nullptr); + vkDestroySampler(*device, samplers_.nearest_repeat, nullptr); + vkDestroySampler(*device, samplers_.linear_clamp, nullptr); + vkDestroySampler(*device, samplers_.linear_repeat, nullptr); +} + +std::unique_ptr VulkanImmediateDrawer::CreateTexture( + uint32_t width, uint32_t height, ImmediateTextureFilter filter, bool repeat, + const uint8_t* data) { + auto device = context_->device(); + + VkSampler sampler = nullptr; + switch (filter) { + case ImmediateTextureFilter::kNearest: + sampler = repeat ? samplers_.nearest_repeat : samplers_.nearest_clamp; + break; + case ImmediateTextureFilter::kLinear: + sampler = repeat ? samplers_.linear_repeat : samplers_.linear_clamp; + break; + default: + assert_unhandled_case(filter); + sampler = samplers_.nearest_clamp; + break; + } + + auto texture = std::make_unique( + device, descriptor_pool_, texture_set_layout_, sampler, width, height); + if (data) { + UpdateTexture(texture.get(), data); + } + return std::unique_ptr(texture.release()); +} + +void VulkanImmediateDrawer::UpdateTexture(ImmediateTexture* texture, + const uint8_t* data) { + static_cast(texture)->Upload(data); +} + +void VulkanImmediateDrawer::Begin(int render_target_width, + int render_target_height) { + auto device = context_->device(); + auto swap_chain = context_->swap_chain(); + assert_null(current_cmd_buffer_); + current_cmd_buffer_ = swap_chain->render_cmd_buffer(); + + // Viewport changes only once per batch. 
+ VkViewport viewport; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = static_cast(render_target_width); + viewport.height = static_cast(render_target_height); + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + vkCmdSetViewport(current_cmd_buffer_, 0, 1, &viewport); + + // Update projection matrix. + const float ortho_projection[4][4] = { + {2.0f / render_target_width, 0.0f, 0.0f, 0.0f}, + {0.0f, 2.0f / -render_target_height, 0.0f, 0.0f}, + {0.0f, 0.0f, -1.0f, 0.0f}, + {-1.0f, 1.0f, 0.0f, 1.0f}, + }; + vkCmdPushConstants(current_cmd_buffer_, pipeline_layout_, + VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float) * 16, + ortho_projection); +} + +void VulkanImmediateDrawer::BeginDrawBatch(const ImmediateDrawBatch& batch) { + auto device = context_->device(); + + // Upload vertices. + VkDeviceSize vertices_offset = circular_buffer_->Emplace( + batch.vertices, batch.vertex_count * sizeof(ImmediateVertex)); + if (vertices_offset == VK_WHOLE_SIZE) { + // TODO(benvanik): die? + return; + } + auto vertex_buffer = circular_buffer_->vertex_buffer(); + vkCmdBindVertexBuffers(current_cmd_buffer_, 0, 1, &vertex_buffer, + &vertices_offset); + + // Upload indices. + if (batch.indices) { + VkDeviceSize indices_offset = circular_buffer_->Emplace( + batch.indices, batch.index_count * sizeof(uint16_t)); + if (indices_offset == VK_WHOLE_SIZE) { + // TODO(benvanik): die? 
+ return; + } + vkCmdBindIndexBuffer(current_cmd_buffer_, circular_buffer_->index_buffer(), + indices_offset, VK_INDEX_TYPE_UINT16); + } + + batch_has_index_buffer_ = !!batch.indices; +} + +void VulkanImmediateDrawer::Draw(const ImmediateDraw& draw) { + auto swap_chain = context_->swap_chain(); + + if (draw.primitive_type != ImmediatePrimitiveType::kTriangles) { + return; + } + switch (draw.primitive_type) { + case ImmediatePrimitiveType::kLines: + vkCmdBindPipeline(current_cmd_buffer_, VK_PIPELINE_BIND_POINT_GRAPHICS, + line_pipeline_); + break; + case ImmediatePrimitiveType::kTriangles: + vkCmdBindPipeline(current_cmd_buffer_, VK_PIPELINE_BIND_POINT_GRAPHICS, + triangle_pipeline_); + break; + } + + // Setup texture binding. + VkDescriptorSet texture_set = nullptr; + auto texture = reinterpret_cast(draw.texture_handle); + if (texture) { + texture_set = texture->descriptor_set(); + } + vkCmdBindDescriptorSets(current_cmd_buffer_, VK_PIPELINE_BIND_POINT_GRAPHICS, + pipeline_layout_, 0, 1, &texture_set, 0, nullptr); + + // Use push constants for our per-draw changes. + // Here, the restrict_texture_samples uniform. + int restrict_texture_samples = draw.restrict_texture_samples ? 1 : 0; + vkCmdPushConstants(current_cmd_buffer_, pipeline_layout_, + VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(float) * 16, + sizeof(int), &restrict_texture_samples); + + // Scissor, if enabled. + // Scissor can be disabled by making it the full screen. + VkRect2D scissor; + if (draw.scissor) { + scissor.offset.x = draw.scissor_rect[0]; + scissor.offset.y = swap_chain->surface_height() - + (draw.scissor_rect[1] + draw.scissor_rect[3]); + scissor.extent.width = draw.scissor_rect[2]; + scissor.extent.height = draw.scissor_rect[3]; + } else { + scissor.offset.x = 0; + scissor.offset.y = 0; + scissor.extent.width = swap_chain->surface_width(); + scissor.extent.height = swap_chain->surface_height(); + } + vkCmdSetScissor(current_cmd_buffer_, 0, 1, &scissor); + + // Issue draw. 
+ if (batch_has_index_buffer_) { + vkCmdDrawIndexed(current_cmd_buffer_, draw.count, 1, draw.index_offset, + draw.base_vertex, 0); + } else { + vkCmdDraw(current_cmd_buffer_, draw.count, 1, draw.base_vertex, 0); + } +} + +void VulkanImmediateDrawer::EndDrawBatch() {} + +void VulkanImmediateDrawer::End() { current_cmd_buffer_ = nullptr; } + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_immediate_drawer.h b/src/xenia/ui/vulkan/vulkan_immediate_drawer.h new file mode 100644 index 000000000..004804e66 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_immediate_drawer.h @@ -0,0 +1,69 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. * + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_IMMEDIATE_DRAWER_H_ +#define XENIA_UI_VULKAN_VULKAN_IMMEDIATE_DRAWER_H_ + +#include + +#include "xenia/ui/immediate_drawer.h" +#include "xenia/ui/vulkan/vulkan.h" + +namespace xe { +namespace ui { +namespace vulkan { + +class LightweightCircularBuffer; +class VulkanContext; + +class VulkanImmediateDrawer : public ImmediateDrawer { + public: + VulkanImmediateDrawer(VulkanContext* graphics_context); + ~VulkanImmediateDrawer() override; + + std::unique_ptr CreateTexture(uint32_t width, + uint32_t height, + ImmediateTextureFilter filter, + bool repeat, + const uint8_t* data) override; + void UpdateTexture(ImmediateTexture* texture, const uint8_t* data) override; + + void Begin(int render_target_width, int render_target_height) override; + void BeginDrawBatch(const ImmediateDrawBatch& batch) override; + void Draw(const ImmediateDraw& draw) override; + void EndDrawBatch() override; 
+ void End() override; + + private: + VulkanContext* context_ = nullptr; + + struct { + VkSampler nearest_clamp = nullptr; + VkSampler nearest_repeat = nullptr; + VkSampler linear_clamp = nullptr; + VkSampler linear_repeat = nullptr; + } samplers_; + + VkDescriptorSetLayout texture_set_layout_ = nullptr; + VkDescriptorPool descriptor_pool_ = nullptr; + VkPipelineLayout pipeline_layout_ = nullptr; + VkPipeline triangle_pipeline_ = nullptr; + VkPipeline line_pipeline_ = nullptr; + + std::unique_ptr circular_buffer_; + + bool batch_has_index_buffer_ = false; + VkCommandBuffer current_cmd_buffer_ = nullptr; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_IMMEDIATE_DRAWER_H_ diff --git a/src/xenia/ui/vulkan/vulkan_instance.cc b/src/xenia/ui/vulkan/vulkan_instance.cc new file mode 100644 index 000000000..900bf66e1 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_instance.cc @@ -0,0 +1,486 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_instance.h" + +#include + +#include +#include +#include + +#include "xenia/base/assert.h" +#include "xenia/base/logging.h" +#include "xenia/base/math.h" +#include "xenia/base/profiling.h" +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_immediate_drawer.h" +#include "xenia/ui/vulkan/vulkan_util.h" +#include "xenia/ui/window.h" + +namespace xe { +namespace ui { +namespace vulkan { + +VulkanInstance::VulkanInstance() { + if (FLAGS_vulkan_validation) { + // DeclareRequiredLayer("VK_LAYER_GOOGLE_unique_objects", Version::Make(0, + // 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_threading", Version::Make(0, 0, 0), + true); + // DeclareRequiredLayer("VK_LAYER_LUNARG_mem_tracker", Version::Make(0, 0, + // 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_object_tracker", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_draw_state", Version::Make(0, 0, 0), + true); + DeclareRequiredLayer("VK_LAYER_LUNARG_param_checker", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_swapchain", Version::Make(0, 0, 0), + true); + DeclareRequiredLayer("VK_LAYER_LUNARG_device_limits", + Version::Make(0, 0, 0), true); + DeclareRequiredLayer("VK_LAYER_LUNARG_image", Version::Make(0, 0, 0), true); + DeclareRequiredExtension(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, + Version::Make(0, 0, 0), true); + } +} + +VulkanInstance::~VulkanInstance() { DestroyInstance(); } + +bool VulkanInstance::Initialize(Window* any_target_window) { + auto version = Version::Parse(VK_API_VERSION); + XELOGVK("Initializing Vulkan %s...", version.pretty_string.c_str()); + + // Get all of the global layers and extensions provided by the system. 
+ if (!QueryGlobals()) { + XELOGE("Failed to query instance globals"); + return false; + } + + // Create the vulkan instance used by the application with our required + // extensions and layers. + if (!CreateInstance()) { + XELOGE("Failed to create instance"); + return false; + } + + // Query available devices so that we can pick one. + if (!QueryDevices(any_target_window)) { + XELOGE("Failed to query devices"); + return false; + } + + XELOGVK("Instance initialized successfully!"); + return true; +} + +bool VulkanInstance::QueryGlobals() { + // Scan global layers and accumulate properties. + // We do this in a loop so that we can allocate the required amount of + // memory and handle race conditions while querying. + uint32_t count = 0; + std::vector global_layer_properties; + VkResult err; + do { + err = vkEnumerateInstanceLayerProperties(&count, nullptr); + CheckResult(err, "vkEnumerateInstanceLayerProperties"); + global_layer_properties.resize(count); + err = vkEnumerateInstanceLayerProperties(&count, + global_layer_properties.data()); + } while (err == VK_INCOMPLETE); + CheckResult(err, "vkEnumerateInstanceLayerProperties"); + global_layers_.resize(count); + for (size_t i = 0; i < global_layers_.size(); ++i) { + auto& global_layer = global_layers_[i]; + global_layer.properties = global_layer_properties[i]; + + // Get all extensions available for the layer. 
+ do { + err = vkEnumerateInstanceExtensionProperties( + global_layer.properties.layerName, &count, nullptr); + CheckResult(err, "vkEnumerateInstanceExtensionProperties"); + global_layer.extensions.resize(count); + err = vkEnumerateInstanceExtensionProperties( + global_layer.properties.layerName, &count, + global_layer.extensions.data()); + } while (err == VK_INCOMPLETE); + CheckResult(err, "vkEnumerateInstanceExtensionProperties"); + } + XELOGVK("Found %d global layers:", global_layers_.size()); + for (size_t i = 0; i < global_layers_.size(); ++i) { + auto& global_layer = global_layers_[i]; + auto spec_version = Version::Parse(global_layer.properties.specVersion); + auto impl_version = + Version::Parse(global_layer.properties.implementationVersion); + XELOGVK("- %s (spec: %s, impl: %s)", global_layer.properties.layerName, + spec_version.pretty_string.c_str(), + impl_version.pretty_string.c_str()); + XELOGVK(" %s", global_layer.properties.description); + if (!global_layer.extensions.empty()) { + XELOGVK(" %d extensions:", global_layer.extensions.size()); + DumpExtensions(global_layer.extensions, " "); + } + } + + // Scan global extensions. + do { + err = vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr); + CheckResult(err, "vkEnumerateInstanceExtensionProperties"); + global_extensions_.resize(count); + err = vkEnumerateInstanceExtensionProperties(nullptr, &count, + global_extensions_.data()); + } while (err == VK_INCOMPLETE); + CheckResult(err, "vkEnumerateInstanceExtensionProperties"); + XELOGVK("Found %d global extensions:", global_extensions_.size()); + DumpExtensions(global_extensions_, ""); + + return true; +} + +bool VulkanInstance::CreateInstance() { + XELOGVK("Verifying layers and extensions..."); + + // Gather list of enabled layer names. + auto layers_result = CheckRequirements(required_layers_, global_layers_); + auto& enabled_layers = layers_result.second; + + // Gather list of enabled extension names. 
+ auto extensions_result = + CheckRequirements(required_extensions_, global_extensions_); + auto& enabled_extensions = extensions_result.second; + + // We wait until both extensions and layers are checked before failing out so + // that the user gets a complete list of what they have/don't. + if (!extensions_result.first || !layers_result.first) { + XELOGE("Layer and extension verification failed; aborting initialization"); + return false; + } + + XELOGVK("Initializing application instance..."); + + // TODO(benvanik): use GetEntryInfo? + VkApplicationInfo application_info; + application_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + application_info.pNext = nullptr; + application_info.pApplicationName = "xenia"; + application_info.applicationVersion = 1; + application_info.pEngineName = "xenia"; + application_info.engineVersion = 1; + application_info.apiVersion = VK_API_VERSION; + + VkInstanceCreateInfo instance_info; + instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + instance_info.pNext = nullptr; + instance_info.flags = 0; + instance_info.pApplicationInfo = &application_info; + instance_info.enabledLayerCount = + static_cast(enabled_layers.size()); + instance_info.ppEnabledLayerNames = enabled_layers.data(); + instance_info.enabledExtensionCount = + static_cast(enabled_extensions.size()); + instance_info.ppEnabledExtensionNames = enabled_extensions.data(); + + auto err = vkCreateInstance(&instance_info, nullptr, &handle); + switch (err) { + case VK_SUCCESS: + // Ok! 
+ break; + case VK_ERROR_INITIALIZATION_FAILED: + XELOGE("Instance initialization failed; generic"); + return false; + case VK_ERROR_INCOMPATIBLE_DRIVER: + XELOGE( + "Instance initialization failed; cannot find a compatible Vulkan " + "installable client driver (ICD)"); + return false; + case VK_ERROR_EXTENSION_NOT_PRESENT: + XELOGE("Instance initialization failed; requested extension not present"); + return false; + case VK_ERROR_LAYER_NOT_PRESENT: + XELOGE("Instance initialization failed; requested layer not present"); + return false; + default: + XELOGE("Instance initialization failed; unknown: %s", to_string(err)); + return false; + } + + // Enable debug validation, if needed. + EnableDebugValidation(); + + return true; +} + +void VulkanInstance::DestroyInstance() { + if (!handle) { + return; + } + DisableDebugValidation(); + vkDestroyInstance(handle, nullptr); + handle = nullptr; +} + +VkBool32 VKAPI_PTR DebugMessageCallback(VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, void* pUserData) { + auto instance = reinterpret_cast(pUserData); + const char* message_type = "UNKNOWN"; + if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) { + message_type = "ERROR"; + } else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) { + message_type = "WARN"; + } else if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) { + message_type = "PERF WARN"; + } else if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) { + message_type = "INFO"; + } else if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) { + message_type = "DEBUG"; + } + XELOGVK("[%s/%s:%d] %s", pLayerPrefix, message_type, messageCode, pMessage); + return false; +} + +void VulkanInstance::EnableDebugValidation() { + if (dbg_report_callback_) { + DisableDebugValidation(); + } + auto vk_create_debug_report_callback_ext = + reinterpret_cast( + vkGetInstanceProcAddr(handle, "vkCreateDebugReportCallbackEXT")); + if 
(!vk_create_debug_report_callback_ext) { + XELOGVK("Debug validation layer not installed; ignoring"); + return; + } + VkDebugReportCallbackCreateInfoEXT create_info; + create_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT; + create_info.pNext = nullptr; + // TODO(benvanik): flags to set these. + create_info.flags = + VK_DEBUG_REPORT_INFORMATION_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | + VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_DEBUG_BIT_EXT; + create_info.pfnCallback = &DebugMessageCallback; + create_info.pUserData = this; + auto err = vk_create_debug_report_callback_ext(handle, &create_info, nullptr, + &dbg_report_callback_); + CheckResult(err, "vkCreateDebugReportCallbackEXT"); + XELOGVK("Debug validation layer enabled"); +} + +void VulkanInstance::DisableDebugValidation() { + if (!dbg_report_callback_) { + return; + } + auto vk_destroy_debug_report_callback_ext = + reinterpret_cast( + vkGetInstanceProcAddr(handle, "vkDestroyDebugReportCallbackEXT")); + if (!vk_destroy_debug_report_callback_ext) { + return; + } + vk_destroy_debug_report_callback_ext(handle, dbg_report_callback_, nullptr); + dbg_report_callback_ = nullptr; +} + +bool VulkanInstance::QueryDevices(Window* any_target_window) { + // Get handles to all devices. + uint32_t count = 0; + std::vector device_handles; + auto err = vkEnumeratePhysicalDevices(handle, &count, nullptr); + CheckResult(err, "vkEnumeratePhysicalDevices"); + device_handles.resize(count); + err = vkEnumeratePhysicalDevices(handle, &count, device_handles.data()); + CheckResult(err, "vkEnumeratePhysicalDevices"); + + // Query device info. + for (size_t i = 0; i < device_handles.size(); ++i) { + auto device_handle = device_handles[i]; + DeviceInfo device_info; + device_info.handle = device_handle; + + // Query general attributes. 
+ vkGetPhysicalDeviceProperties(device_handle, &device_info.properties); + vkGetPhysicalDeviceFeatures(device_handle, &device_info.features); + vkGetPhysicalDeviceMemoryProperties(device_handle, + &device_info.memory_properties); + + // Gather queue family properties. + vkGetPhysicalDeviceQueueFamilyProperties(device_handle, &count, nullptr); + device_info.queue_family_properties.resize(count); + vkGetPhysicalDeviceQueueFamilyProperties( + device_handle, &count, device_info.queue_family_properties.data()); + + // Gather queue family presentation support. + // TODO(benvanik): move to swap chain? + VkSurfaceKHR any_surface = nullptr; +#if XE_PLATFORM_WIN32 + VkWin32SurfaceCreateInfoKHR create_info; + create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; + create_info.pNext = nullptr; + create_info.flags = 0; + create_info.hinstance = + static_cast(any_target_window->native_platform_handle()); + create_info.hwnd = static_cast(any_target_window->native_handle()); + err = vkCreateWin32SurfaceKHR(handle, &create_info, nullptr, &any_surface); + CheckResult(err, "vkCreateWin32SurfaceKHR"); +#else +#error Platform not yet implemented. +#endif // XE_PLATFORM_WIN32 + device_info.queue_family_supports_present.resize( + device_info.queue_family_properties.size()); + for (size_t j = 0; j < device_info.queue_family_supports_present.size(); + ++j) { + err = vkGetPhysicalDeviceSurfaceSupportKHR( + device_handle, static_cast(j), any_surface, + &device_info.queue_family_supports_present[j]); + CheckResult(err, "vkGetPhysicalDeviceSurfaceSupportKHR"); + } + vkDestroySurfaceKHR(handle, any_surface, nullptr); + + // Gather layers. 
+ std::vector layer_properties; + err = vkEnumerateDeviceLayerProperties(device_handle, &count, nullptr); + CheckResult(err, "vkEnumerateDeviceLayerProperties"); + layer_properties.resize(count); + err = vkEnumerateDeviceLayerProperties(device_handle, &count, + layer_properties.data()); + CheckResult(err, "vkEnumerateDeviceLayerProperties"); + for (size_t j = 0; j < layer_properties.size(); ++j) { + LayerInfo layer_info; + layer_info.properties = layer_properties[j]; + err = vkEnumerateDeviceExtensionProperties( + device_handle, layer_info.properties.layerName, &count, nullptr); + CheckResult(err, "vkEnumerateDeviceExtensionProperties"); + layer_info.extensions.resize(count); + err = vkEnumerateDeviceExtensionProperties( + device_handle, layer_info.properties.layerName, &count, + layer_info.extensions.data()); + CheckResult(err, "vkEnumerateDeviceExtensionProperties"); + device_info.layers.push_back(std::move(layer_info)); + } + + // Gather extensions. + err = vkEnumerateDeviceExtensionProperties(device_handle, nullptr, &count, + nullptr); + CheckResult(err, "vkEnumerateDeviceExtensionProperties"); + device_info.extensions.resize(count); + err = vkEnumerateDeviceExtensionProperties(device_handle, nullptr, &count, + device_info.extensions.data()); + CheckResult(err, "vkEnumerateDeviceExtensionProperties"); + + available_devices_.push_back(std::move(device_info)); + } + + XELOGVK("Found %d physical devices:", available_devices_.size()); + for (size_t i = 0; i < available_devices_.size(); ++i) { + auto& device_info = available_devices_[i]; + XELOGVK("- Device %d:", i); + DumpDeviceInfo(device_info); + } + + return true; +} + +void VulkanInstance::DumpLayers(const std::vector& layers, + const char* indent) { + for (size_t i = 0; i < layers.size(); ++i) { + auto& layer = layers[i]; + auto spec_version = Version::Parse(layer.properties.specVersion); + auto impl_version = Version::Parse(layer.properties.implementationVersion); + XELOGVK("%s- %s (spec: %s, impl: %s)", 
indent, layer.properties.layerName, + spec_version.pretty_string.c_str(), + impl_version.pretty_string.c_str()); + XELOGVK("%s %s", indent, layer.properties.description); + if (!layer.extensions.empty()) { + XELOGVK("%s %d extensions:", indent, layer.extensions.size()); + DumpExtensions(layer.extensions, std::strlen(indent) ? " " : " "); + } + } +} + +void VulkanInstance::DumpExtensions( + const std::vector& extensions, const char* indent) { + for (size_t i = 0; i < extensions.size(); ++i) { + auto& extension = extensions[i]; + auto version = Version::Parse(extension.specVersion); + XELOGVK("%s- %s (%s)", indent, extension.extensionName, + version.pretty_string.c_str()); + } +} + +void VulkanInstance::DumpDeviceInfo(const DeviceInfo& device_info) { + auto& properties = device_info.properties; + auto api_version = Version::Parse(properties.apiVersion); + auto driver_version = Version::Parse(properties.driverVersion); + XELOGVK(" apiVersion = %s", api_version.pretty_string.c_str()); + XELOGVK(" driverVersion = %s", driver_version.pretty_string.c_str()); + XELOGVK(" vendorId = 0x%04x", properties.vendorID); + XELOGVK(" deviceId = 0x%04x", properties.deviceID); + XELOGVK(" deviceType = %s", to_string(properties.deviceType)); + XELOGVK(" deviceName = %s", properties.deviceName); + + auto& memory_props = device_info.memory_properties; + XELOGVK(" Memory Heaps:"); + for (size_t j = 0; j < memory_props.memoryHeapCount; ++j) { + XELOGVK(" - Heap %u: %" PRIu64 " bytes", j, + memory_props.memoryHeaps[j].size); + for (size_t k = 0; k < memory_props.memoryTypeCount; ++k) { + if (memory_props.memoryTypes[k].heapIndex == j) { + XELOGVK(" - Type %u:", k); + auto type_flags = memory_props.memoryTypes[k].propertyFlags; + if (!type_flags) { + XELOGVK(" VK_MEMORY_PROPERTY_DEVICE_ONLY"); + } + if (type_flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) { + XELOGVK(" VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"); + } + if (type_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { + XELOGVK(" 
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"); + } + if (type_flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) { + XELOGVK(" VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"); + } + if (type_flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) { + XELOGVK(" VK_MEMORY_PROPERTY_HOST_CACHED_BIT"); + } + if (type_flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) { + XELOGVK(" VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"); + } + } + } + } + + XELOGVK(" Queue Families:"); + for (size_t j = 0; j < device_info.queue_family_properties.size(); ++j) { + auto& queue_props = device_info.queue_family_properties[j]; + XELOGVK(" - Queue %d:", j); + XELOGVK( + " queueFlags = %s, %s, %s, %s", + (queue_props.queueFlags & VK_QUEUE_GRAPHICS_BIT) ? "graphics" : "", + (queue_props.queueFlags & VK_QUEUE_COMPUTE_BIT) ? "compute" : "", + (queue_props.queueFlags & VK_QUEUE_TRANSFER_BIT) ? "transfer" : "", + (queue_props.queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) ? "sparse" : ""); + XELOGVK(" queueCount = %u", queue_props.queueCount); + XELOGVK(" timestampValidBits = %u", queue_props.timestampValidBits); + XELOGVK(" supportsPresent = %s", + device_info.queue_family_supports_present[j] ? "true" : "false"); + } + + XELOGVK(" Layers:"); + DumpLayers(device_info.layers, " "); + + XELOGVK(" Extensions:"); + DumpExtensions(device_info.extensions, " "); +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_instance.h b/src/xenia/ui/vulkan/vulkan_instance.h new file mode 100644 index 000000000..c292f3020 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_instance.h @@ -0,0 +1,95 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2014 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_INSTANCE_H_ +#define XENIA_UI_VULKAN_VULKAN_INSTANCE_H_ + +#include +#include +#include + +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_util.h" +#include "xenia/ui/window.h" + +namespace xe { +namespace ui { +namespace vulkan { + +// Wrappers and utilities for VkInstance. +class VulkanInstance { + public: + VulkanInstance(); + ~VulkanInstance(); + + VkInstance handle = nullptr; + + operator VkInstance() const { return handle; } + + // Declares a layer to verify and enable upon initialization. + // Must be called before Initialize. + void DeclareRequiredLayer(std::string name, uint32_t min_version, + bool is_optional) { + required_layers_.push_back({name, min_version, is_optional}); + } + + // Declares an extension to verify and enable upon initialization. + // Must be called before Initialize. + void DeclareRequiredExtension(std::string name, uint32_t min_version, + bool is_optional) { + required_extensions_.push_back({name, min_version, is_optional}); + } + + // Initializes the instance, querying and enabling extensions and layers and + // preparing the instance for general use. + // If initialization succeeds it's likely that no more failures beyond runtime + // issues will occur. + // TODO(benvanik): remove need for any_target_window - it's just for queries. + bool Initialize(Window* any_target_window); + + // Returns a list of all available devices as detected during initialization. + const std::vector& available_devices() const { + return available_devices_; + } + + private: + // Queries the system to find global extensions and layers. + bool QueryGlobals(); + + // Creates the instance, enabling required extensions and layers. + bool CreateInstance(); + void DestroyInstance(); + + // Enables debugging info and callbacks for supported layers. 
+ void EnableDebugValidation(); + void DisableDebugValidation(); + + // Queries all available physical devices. + bool QueryDevices(Window* any_target_window); + + void DumpLayers(const std::vector& layers, const char* indent); + void DumpExtensions(const std::vector& extensions, + const char* indent); + void DumpDeviceInfo(const DeviceInfo& device_info); + + std::vector required_layers_; + std::vector required_extensions_; + + std::vector global_layers_; + std::vector global_extensions_; + std::vector available_devices_; + + VkDebugReportCallbackEXT dbg_report_callback_ = nullptr; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_INSTANCE_H_ diff --git a/src/xenia/ui/vulkan/vulkan_provider.cc b/src/xenia/ui/vulkan/vulkan_provider.cc new file mode 100644 index 000000000..300604bfb --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_provider.cc @@ -0,0 +1,107 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_provider.h" + +#include + +#include + +#include "xenia/base/logging.h" +#include "xenia/ui/vulkan/vulkan_context.h" +#include "xenia/ui/vulkan/vulkan_device.h" +#include "xenia/ui/vulkan/vulkan_instance.h" +#include "xenia/ui/vulkan/vulkan_util.h" + +DEFINE_uint64(vulkan_device_index, 0, "Index of the physical device to use."); + +namespace xe { +namespace ui { +namespace vulkan { + +std::unique_ptr VulkanProvider::Create(Window* main_window) { + std::unique_ptr provider(new VulkanProvider(main_window)); + if (!provider->Initialize()) { + xe::FatalError( + "Unable to initialize Vulkan graphics subsystem.\n" + "Ensure you have the latest drivers for your GPU and that it " + "supports Vulkan. See http://xenia.jp/faq/ for more information and a " + "list of supported GPUs."); + return nullptr; + } + return std::unique_ptr(provider.release()); +} + +VulkanProvider::VulkanProvider(Window* main_window) + : GraphicsProvider(main_window) {} + +VulkanProvider::~VulkanProvider() { + device_.reset(); + instance_.reset(); +} + +bool VulkanProvider::Initialize() { + instance_ = std::make_unique(); + + // Always enable the swapchain. + instance_->DeclareRequiredExtension("VK_KHR_surface", Version::Make(0, 0, 0), + false); + instance_->DeclareRequiredExtension("VK_KHR_win32_surface", + Version::Make(0, 0, 0), false); + + // Attempt initialization and device query. + if (!instance_->Initialize(main_window_)) { + XELOGE("Failed to initialize vulkan instance"); + return false; + } + + // Pick the device to use. + auto available_devices = instance_->available_devices(); + if (available_devices.empty()) { + XELOGE("No devices available for use"); + return false; + } + size_t device_index = + std::min(available_devices.size(), FLAGS_vulkan_device_index); + auto& device_info = available_devices[device_index]; + + // Create the device. 
+ device_ = std::make_unique(instance_.get()); + device_->DeclareRequiredExtension("VK_KHR_swapchain", Version::Make(0, 0, 0), + false); + if (!device_->Initialize(device_info)) { + XELOGE("Unable to initialize device"); + return false; + } + + return true; +} + +std::unique_ptr VulkanProvider::CreateContext( + Window* target_window) { + auto new_context = + std::unique_ptr(new VulkanContext(this, target_window)); + if (!new_context->Initialize()) { + return nullptr; + } + return std::unique_ptr(new_context.release()); +} + +std::unique_ptr VulkanProvider::CreateOffscreenContext() { + auto new_context = + std::unique_ptr(new VulkanContext(this, nullptr)); + if (!new_context->Initialize()) { + return nullptr; + } + return std::unique_ptr(new_context.release()); +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_provider.h b/src/xenia/ui/vulkan/vulkan_provider.h new file mode 100644 index 000000000..efc174614 --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_provider.h @@ -0,0 +1,50 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_PROVIDER_H_ +#define XENIA_UI_VULKAN_VULKAN_PROVIDER_H_ + +#include + +#include "xenia/ui/graphics_provider.h" + +namespace xe { +namespace ui { +namespace vulkan { + +class VulkanDevice; +class VulkanInstance; + +class VulkanProvider : public GraphicsProvider { + public: + ~VulkanProvider() override; + + static std::unique_ptr Create(Window* main_window); + + VulkanInstance* instance() const { return instance_.get(); } + VulkanDevice* device() const { return device_.get(); } + + std::unique_ptr CreateContext( + Window* target_window) override; + std::unique_ptr CreateOffscreenContext() override; + + protected: + explicit VulkanProvider(Window* main_window); + + bool Initialize(); + + std::unique_ptr instance_; + std::unique_ptr device_; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_PROVIDER_H_ diff --git a/src/xenia/ui/vulkan/vulkan_swap_chain.cc b/src/xenia/ui/vulkan/vulkan_swap_chain.cc new file mode 100644 index 000000000..ec640d92f --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_swap_chain.cc @@ -0,0 +1,510 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_swap_chain.h" + +#include + +#include +#include + +#include "xenia/base/assert.h" +#include "xenia/base/logging.h" +#include "xenia/base/math.h" +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_device.h" +#include "xenia/ui/vulkan/vulkan_instance.h" +#include "xenia/ui/vulkan/vulkan_util.h" + +DEFINE_bool(vulkan_random_clear_color, false, + "Randomizes framebuffer clear color."); + +namespace xe { +namespace ui { +namespace vulkan { + +VulkanSwapChain::VulkanSwapChain(VulkanInstance* instance, VulkanDevice* device) + : instance_(instance), device_(device) {} + +VulkanSwapChain::~VulkanSwapChain() { + for (auto& buffer : buffers_) { + DestroyBuffer(&buffer); + } + if (image_available_semaphore_) { + vkDestroySemaphore(*device_, image_available_semaphore_, nullptr); + } + if (render_pass_) { + vkDestroyRenderPass(*device_, render_pass_, nullptr); + } + if (render_cmd_buffer_) { + vkFreeCommandBuffers(*device_, cmd_pool_, 1, &render_cmd_buffer_); + } + if (cmd_pool_) { + vkDestroyCommandPool(*device_, cmd_pool_, nullptr); + } + // images_ doesn't need to be cleaned up as the swapchain does it implicitly. + if (handle) { + vkDestroySwapchainKHR(*device_, handle, nullptr); + handle = nullptr; + } + if (surface_) { + vkDestroySurfaceKHR(*instance_, surface_, nullptr); + } +} + +bool VulkanSwapChain::Initialize(VkSurfaceKHR surface) { + surface_ = surface; + + // Query supported target formats. 
+ uint32_t count = 0; + auto err = + vkGetPhysicalDeviceSurfaceFormatsKHR(*device_, surface_, &count, nullptr); + CheckResult(err, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + std::vector surface_formats; + surface_formats.resize(count); + err = vkGetPhysicalDeviceSurfaceFormatsKHR(*device_, surface_, &count, + surface_formats.data()); + CheckResult(err, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + + // If the format list includes just one entry of VK_FORMAT_UNDEFINED the + // surface has no preferred format. + // Otherwise, at least one supported format will be returned. + assert_true(surface_formats.size() >= 1); + if (surface_formats.size() == 1 && + surface_formats[0].format == VK_FORMAT_UNDEFINED) { + // Fallback to common RGBA. + surface_format_ = VK_FORMAT_R8G8B8A8_UNORM; + } else { + // Use first defined format. + surface_format_ = surface_formats[0].format; + } + + // Query surface min/max/caps. + VkSurfaceCapabilitiesKHR surface_caps; + err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(*device_, surface_, + &surface_caps); + CheckResult(err, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); + + // Query surface properties so we can configure ourselves within bounds. + std::vector present_modes; + err = vkGetPhysicalDeviceSurfacePresentModesKHR(*device_, surface_, &count, + nullptr); + CheckResult(err, "vkGetPhysicalDeviceSurfacePresentModesKHR"); + present_modes.resize(count); + err = vkGetPhysicalDeviceSurfacePresentModesKHR(*device_, surface_, &count, + present_modes.data()); + CheckResult(err, "vkGetPhysicalDeviceSurfacePresentModesKHR"); + + // Calculate swapchain target dimensions. + VkExtent2D extent = surface_caps.currentExtent; + if (surface_caps.currentExtent.width == -1) { + assert_true(surface_caps.currentExtent.height == -1); + // Undefined extents, so we need to pick something. 
+ XELOGI("Swap chain target surface extents undefined; guessing value"); + extent.width = 1280; + extent.height = 720; + } + surface_width_ = extent.width; + surface_height_ = extent.height; + + // Always prefer mailbox mode (non-tearing, low-latency). + // If it's not available we'll use immediate (tearing, low-latency). + // If not even that we fall back to FIFO, which sucks. + VkPresentModeKHR present_mode = VK_PRESENT_MODE_FIFO_KHR; + for (size_t i = 0; i < present_modes.size(); ++i) { + if (present_modes[i] == VK_PRESENT_MODE_MAILBOX_KHR) { + // This is the best, so early-out. + present_mode = VK_PRESENT_MODE_MAILBOX_KHR; + break; + } else if (present_modes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR) { + present_mode = VK_PRESENT_MODE_IMMEDIATE_KHR; + } + } + + // Determine the number of images (1 + number queued). + uint32_t image_count = surface_caps.minImageCount + 1; + if (surface_caps.maxImageCount > 0 && + image_count > surface_caps.maxImageCount) { + // Too many requested - use whatever we can. + XELOGI("Requested number of swapchain images (%d) exceeds maximum (%d)", + image_count, surface_caps.maxImageCount); + image_count = surface_caps.maxImageCount; + } + + // Always pass through whatever transform the surface started with (so long + // as it's supported). 
+ VkSurfaceTransformFlagBitsKHR pre_transform = surface_caps.currentTransform; + + VkSwapchainCreateInfoKHR create_info; + create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + create_info.pNext = nullptr; + create_info.flags = 0; + create_info.surface = surface_; + create_info.minImageCount = image_count; + create_info.imageFormat = surface_format_; + create_info.imageColorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR; + create_info.imageExtent.width = extent.width; + create_info.imageExtent.height = extent.height; + create_info.imageArrayLayers = 1; + create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + create_info.queueFamilyIndexCount = 0; + create_info.pQueueFamilyIndices = nullptr; + create_info.preTransform = pre_transform; + create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; + create_info.presentMode = present_mode; + create_info.clipped = VK_TRUE; + create_info.oldSwapchain = nullptr; + + XELOGVK("Creating swap chain:"); + XELOGVK(" minImageCount = %u", create_info.minImageCount); + XELOGVK(" imageFormat = %s", to_string(create_info.imageFormat)); + XELOGVK(" imageExtent = %d x %d", create_info.imageExtent.width, + create_info.imageExtent.height); + auto pre_transform_str = to_flags_string(create_info.preTransform); + XELOGVK(" preTransform = %s", pre_transform_str.c_str()); + XELOGVK(" imageArrayLayers = %u", create_info.imageArrayLayers); + XELOGVK(" presentMode = %s", to_string(create_info.presentMode)); + XELOGVK(" clipped = %s", create_info.clipped ? 
"true" : "false"); + XELOGVK(" imageColorSpace = %s", to_string(create_info.imageColorSpace)); + auto image_usage_flags_str = to_flags_string(create_info.imageUsage); + XELOGVK(" imageUsageFlags = %s", image_usage_flags_str.c_str()); + XELOGVK(" imageSharingMode = %s", to_string(create_info.imageSharingMode)); + XELOGVK(" queueFamilyCount = %u", create_info.queueFamilyIndexCount); + + err = vkCreateSwapchainKHR(*device_, &create_info, nullptr, &handle); + if (err) { + XELOGE("Failed to create swapchain: %s", to_string(err)); + return false; + } + + // Create the pool used for transient buffers, so we can reset them all at + // once. + VkCommandPoolCreateInfo cmd_pool_info; + cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + cmd_pool_info.pNext = nullptr; + cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + cmd_pool_info.queueFamilyIndex = device_->queue_family_index(); + err = vkCreateCommandPool(*device_, &cmd_pool_info, nullptr, &cmd_pool_); + CheckResult(err, "vkCreateCommandPool"); + + // Make a command buffer we'll do all our primary rendering from. + VkCommandBufferAllocateInfo cmd_buffer_info; + cmd_buffer_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + cmd_buffer_info.pNext = nullptr; + cmd_buffer_info.commandPool = cmd_pool_; + cmd_buffer_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + cmd_buffer_info.commandBufferCount = 1; + err = + vkAllocateCommandBuffers(*device_, &cmd_buffer_info, &render_cmd_buffer_); + CheckResult(err, "vkCreateCommandBuffer"); + + // Create the render pass used to draw to the swap chain. + // The actual framebuffer attached will depend on which image we are drawing + // into. 
+ VkAttachmentDescription color_attachment; + color_attachment.flags = 0; + color_attachment.format = surface_format_; + color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; + color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + color_attachment.initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + color_attachment.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + VkAttachmentReference color_reference; + color_reference.attachment = 0; + color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + VkAttachmentReference depth_reference; + depth_reference.attachment = VK_ATTACHMENT_UNUSED; + depth_reference.layout = VK_IMAGE_LAYOUT_UNDEFINED; + VkSubpassDescription render_subpass; + render_subpass.flags = 0; + render_subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; + render_subpass.inputAttachmentCount = 0; + render_subpass.pInputAttachments = nullptr; + render_subpass.colorAttachmentCount = 1; + render_subpass.pColorAttachments = &color_reference; + render_subpass.pResolveAttachments = nullptr; + render_subpass.pDepthStencilAttachment = &depth_reference; + render_subpass.preserveAttachmentCount = 0, + render_subpass.pPreserveAttachments = nullptr; + VkRenderPassCreateInfo render_pass_info; + render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + render_pass_info.pNext = nullptr; + render_pass_info.attachmentCount = 1; + render_pass_info.pAttachments = &color_attachment; + render_pass_info.subpassCount = 1; + render_pass_info.pSubpasses = &render_subpass; + render_pass_info.dependencyCount = 0; + render_pass_info.pDependencies = nullptr; + err = vkCreateRenderPass(*device_, &render_pass_info, nullptr, &render_pass_); + CheckResult(err, "vkCreateRenderPass"); + + // Create a semaphore we'll use to synchronize with the 
swapchain. + VkSemaphoreCreateInfo semaphore_info; + semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + semaphore_info.pNext = nullptr; + semaphore_info.flags = 0; + err = vkCreateSemaphore(*device_, &semaphore_info, nullptr, + &image_available_semaphore_); + CheckResult(err, "vkCreateSemaphore"); + + // Get images we will be presenting to. + // Note that this may differ from our requested amount. + uint32_t actual_image_count = 0; + std::vector images; + err = vkGetSwapchainImagesKHR(*device_, handle, &actual_image_count, nullptr); + CheckResult(err, "vkGetSwapchainImagesKHR"); + images.resize(actual_image_count); + err = vkGetSwapchainImagesKHR(*device_, handle, &actual_image_count, + images.data()); + CheckResult(err, "vkGetSwapchainImagesKHR"); + + // Create all buffers. + buffers_.resize(images.size()); + for (size_t i = 0; i < buffers_.size(); ++i) { + if (!InitializeBuffer(&buffers_[i], images[i])) { + XELOGE("Failed to initialize a swapchain buffer"); + return false; + } + } + + XELOGVK("Swap chain initialized successfully!"); + return true; +} + +bool VulkanSwapChain::InitializeBuffer(Buffer* buffer, VkImage target_image) { + DestroyBuffer(buffer); + buffer->image = target_image; + + // Create an image view for the presentation image. + // This will be used as a framebuffer attachment. 
+ VkImageViewCreateInfo image_view_info; + image_view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + image_view_info.pNext = nullptr; + image_view_info.flags = 0; + image_view_info.image = buffer->image; + image_view_info.viewType = VK_IMAGE_VIEW_TYPE_2D; + image_view_info.format = surface_format_; + image_view_info.components.r = VK_COMPONENT_SWIZZLE_R; + image_view_info.components.g = VK_COMPONENT_SWIZZLE_G; + image_view_info.components.b = VK_COMPONENT_SWIZZLE_B; + image_view_info.components.a = VK_COMPONENT_SWIZZLE_A; + image_view_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + image_view_info.subresourceRange.baseMipLevel = 0; + image_view_info.subresourceRange.levelCount = 1; + image_view_info.subresourceRange.baseArrayLayer = 0; + image_view_info.subresourceRange.layerCount = 1; + auto err = vkCreateImageView(*device_, &image_view_info, nullptr, + &buffer->image_view); + CheckResult(err, "vkCreateImageView"); + + // Create the framebuffer used to render into this image. 
+ VkImageView attachments[] = {buffer->image_view}; + VkFramebufferCreateInfo framebuffer_info; + framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebuffer_info.pNext = nullptr; + framebuffer_info.renderPass = render_pass_; + framebuffer_info.attachmentCount = + static_cast(xe::countof(attachments)); + framebuffer_info.pAttachments = attachments; + framebuffer_info.width = surface_width_; + framebuffer_info.height = surface_height_; + framebuffer_info.layers = 1; + err = vkCreateFramebuffer(*device_, &framebuffer_info, nullptr, + &buffer->framebuffer); + CheckResult(err, "vkCreateFramebuffer"); + + return true; +} + +void VulkanSwapChain::DestroyBuffer(Buffer* buffer) { + if (buffer->framebuffer) { + vkDestroyFramebuffer(*device_, buffer->framebuffer, nullptr); + buffer->framebuffer = nullptr; + } + if (buffer->image_view) { + vkDestroyImageView(*device_, buffer->image_view, nullptr); + buffer->image_view = nullptr; + } + // Image is taken care of by the presentation engine. + buffer->image = nullptr; +} + +bool VulkanSwapChain::Begin() { + // Get the index of the next available swapchain image. + auto err = + vkAcquireNextImageKHR(*device_, handle, 0, image_available_semaphore_, + nullptr, ¤t_buffer_index_); + CheckResult(err, "vkAcquireNextImageKHR"); + + // Wait for the acquire semaphore to be signaled so that the following + // operations know they can start modifying the image. + VkSubmitInfo wait_submit_info; + wait_submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + wait_submit_info.pNext = nullptr; + wait_submit_info.waitSemaphoreCount = 1; + wait_submit_info.pWaitSemaphores = &image_available_semaphore_; + wait_submit_info.commandBufferCount = 0; + wait_submit_info.pCommandBuffers = nullptr; + wait_submit_info.signalSemaphoreCount = 0; + wait_submit_info.pSignalSemaphores = nullptr; + err = vkQueueSubmit(device_->primary_queue(), 1, &wait_submit_info, nullptr); + CheckResult(err, "vkQueueSubmit"); + + // Reset all command buffers. 
+ vkResetCommandBuffer(render_cmd_buffer_, + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT); + auto& current_buffer = buffers_[current_buffer_index_]; + + // Build the command buffer that will execute all queued rendering buffers. + VkCommandBufferBeginInfo begin_info; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + begin_info.pNext = nullptr; + begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT; + begin_info.pInheritanceInfo = nullptr; + err = vkBeginCommandBuffer(render_cmd_buffer_, &begin_info); + CheckResult(err, "vkBeginCommandBuffer"); + + // Transition the image to a format we can render to. + VkImageMemoryBarrier pre_image_memory_barrier; + pre_image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + pre_image_memory_barrier.pNext = nullptr; + pre_image_memory_barrier.srcAccessMask = 0; + pre_image_memory_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + pre_image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + pre_image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + pre_image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + pre_image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + pre_image_memory_barrier.image = current_buffer.image; + pre_image_memory_barrier.subresourceRange.aspectMask = + VK_IMAGE_ASPECT_COLOR_BIT; + pre_image_memory_barrier.subresourceRange.baseMipLevel = 0; + pre_image_memory_barrier.subresourceRange.levelCount = 1; + pre_image_memory_barrier.subresourceRange.baseArrayLayer = 0; + pre_image_memory_barrier.subresourceRange.layerCount = 1; + vkCmdPipelineBarrier(render_cmd_buffer_, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, + nullptr, 1, &pre_image_memory_barrier); + + // Begin render pass. 
+ VkClearValue color_clear_value; + color_clear_value.color.float32[0] = 238 / 255.0f; + color_clear_value.color.float32[1] = 238 / 255.0f; + color_clear_value.color.float32[2] = 238 / 255.0f; + color_clear_value.color.float32[3] = 1.0f; + if (FLAGS_vulkan_random_clear_color) { + color_clear_value.color.float32[0] = + rand() / static_cast(RAND_MAX); // NOLINT(runtime/threadsafe_fn) + color_clear_value.color.float32[1] = 1.0f; + color_clear_value.color.float32[2] = 0.0f; + } + VkClearValue clear_values[] = {color_clear_value}; + VkRenderPassBeginInfo render_pass_begin_info; + render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + render_pass_begin_info.pNext = nullptr; + render_pass_begin_info.renderPass = render_pass_; + render_pass_begin_info.framebuffer = current_buffer.framebuffer; + render_pass_begin_info.renderArea.offset.x = 0; + render_pass_begin_info.renderArea.offset.y = 0; + render_pass_begin_info.renderArea.extent.width = surface_width_; + render_pass_begin_info.renderArea.extent.height = surface_height_; + render_pass_begin_info.clearValueCount = + static_cast(xe::countof(clear_values)); + render_pass_begin_info.pClearValues = clear_values; + vkCmdBeginRenderPass(render_cmd_buffer_, &render_pass_begin_info, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS); + + return true; +} + +bool VulkanSwapChain::End() { + auto& current_buffer = buffers_[current_buffer_index_]; + + // End render pass. + vkCmdEndRenderPass(render_cmd_buffer_); + + // Transition the image to a format the presentation engine can source from. 
+ VkImageMemoryBarrier post_image_memory_barrier; + post_image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + post_image_memory_barrier.pNext = nullptr; + post_image_memory_barrier.srcAccessMask = + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + post_image_memory_barrier.dstAccessMask = 0; + post_image_memory_barrier.oldLayout = + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL; + post_image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR; + post_image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + post_image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + post_image_memory_barrier.image = current_buffer.image; + post_image_memory_barrier.subresourceRange.aspectMask = + VK_IMAGE_ASPECT_COLOR_BIT; + post_image_memory_barrier.subresourceRange.baseMipLevel = 0; + post_image_memory_barrier.subresourceRange.levelCount = 1; + post_image_memory_barrier.subresourceRange.baseArrayLayer = 0; + post_image_memory_barrier.subresourceRange.layerCount = 1; + vkCmdPipelineBarrier(render_cmd_buffer_, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, + nullptr, 1, &post_image_memory_barrier); + + auto err = vkEndCommandBuffer(render_cmd_buffer_); + CheckResult(err, "vkEndCommandBuffer"); + + // Submit rendering. + VkSubmitInfo render_submit_info; + render_submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + render_submit_info.pNext = nullptr; + render_submit_info.waitSemaphoreCount = 0; + render_submit_info.pWaitSemaphores = nullptr; + render_submit_info.commandBufferCount = 1; + render_submit_info.pCommandBuffers = &render_cmd_buffer_; + render_submit_info.signalSemaphoreCount = 0; + render_submit_info.pSignalSemaphores = nullptr; + err = + vkQueueSubmit(device_->primary_queue(), 1, &render_submit_info, nullptr); + CheckResult(err, "vkQueueSubmit"); + + // Queue the present of our current image. 
+ const VkSwapchainKHR swap_chains[] = {handle}; + const uint32_t swap_chain_image_indices[] = {current_buffer_index_}; + VkPresentInfoKHR present_info; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + present_info.pNext = nullptr; + present_info.waitSemaphoreCount = 0; + present_info.pWaitSemaphores = nullptr; + present_info.swapchainCount = static_cast(xe::countof(swap_chains)); + present_info.pSwapchains = swap_chains; + present_info.pImageIndices = swap_chain_image_indices; + present_info.pResults = nullptr; + err = vkQueuePresentKHR(device_->primary_queue(), &present_info); + switch (err) { + case VK_SUCCESS: + break; + case VK_SUBOPTIMAL_KHR: + // We are not rendering at the right size - but the presentation engine + // will scale the output for us. + break; + case VK_ERROR_OUT_OF_DATE_KHR: + // Lost presentation ability; need to recreate the swapchain. + // TODO(benvanik): recreate swapchain. + assert_always("Swapchain recreation not implemented"); + break; + default: + XELOGE("Failed to queue present: %s", to_string(err)); + assert_always("Unexpected queue present failure"); + return false; + } + + return true; +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_swap_chain.h b/src/xenia/ui/vulkan/vulkan_swap_chain.h new file mode 100644 index 000000000..18bb26cee --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_swap_chain.h @@ -0,0 +1,80 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2014 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#ifndef XENIA_UI_VULKAN_VULKAN_SWAP_CHAIN_H_ +#define XENIA_UI_VULKAN_VULKAN_SWAP_CHAIN_H_ + +#include +#include +#include + +#include "xenia/ui/vulkan/vulkan.h" +#include "xenia/ui/vulkan/vulkan_util.h" + +namespace xe { +namespace ui { +namespace vulkan { + +class VulkanDevice; +class VulkanInstance; + +class VulkanSwapChain { + public: + VulkanSwapChain(VulkanInstance* instance, VulkanDevice* device); + ~VulkanSwapChain(); + + VkSwapchainKHR handle = nullptr; + + operator VkSwapchainKHR() const { return handle; } + + uint32_t surface_width() const { return surface_width_; } + uint32_t surface_height() const { return surface_height_; } + + // Render pass used for compositing. + VkRenderPass render_pass() const { return render_pass_; } + // Render command buffer, active inside the render pass from Begin to End. + VkCommandBuffer render_cmd_buffer() const { return render_cmd_buffer_; } + + bool Initialize(VkSurfaceKHR surface); + + // Begins the swap operation, preparing state for rendering. + bool Begin(); + // Ends the swap operation, finalizing rendering and presenting the results. 
+ bool End(); + + private: + struct Buffer { + VkImage image = nullptr; + VkImageView image_view = nullptr; + VkFramebuffer framebuffer = nullptr; + }; + + bool InitializeBuffer(Buffer* buffer, VkImage target_image); + void DestroyBuffer(Buffer* buffer); + + VulkanInstance* instance_ = nullptr; + VulkanDevice* device_ = nullptr; + + VkSurfaceKHR surface_ = nullptr; + uint32_t surface_width_ = 0; + uint32_t surface_height_ = 0; + VkFormat surface_format_ = VK_FORMAT_UNDEFINED; + VkCommandPool cmd_pool_ = nullptr; + VkCommandBuffer render_cmd_buffer_ = nullptr; + VkRenderPass render_pass_ = nullptr; + VkSemaphore image_available_semaphore_ = nullptr; + uint32_t current_buffer_index_ = 0; + std::vector buffers_; +}; + +} // namespace vulkan +} // namespace ui +} // namespace xe + +#endif // XENIA_UI_VULKAN_VULKAN_SWAP_CHAIN_H_ diff --git a/src/xenia/ui/vulkan/vulkan_util.cc b/src/xenia/ui/vulkan/vulkan_util.cc new file mode 100644 index 000000000..54402286f --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_util.cc @@ -0,0 +1,464 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
* + ****************************************************************************** + */ + +#include "xenia/ui/vulkan/vulkan_util.h" + +#include "xenia/base/assert.h" +#include "xenia/base/logging.h" + +namespace xe { +namespace ui { +namespace vulkan { + +uint32_t Version::Make(uint32_t major, uint32_t minor, uint32_t patch) { + return VK_MAKE_VERSION(major, minor, patch); +} + +Version Version::Parse(uint32_t value) { + Version version; + version.major = VK_VERSION_MAJOR(value); + version.minor = VK_VERSION_MINOR(value); + version.patch = VK_VERSION_PATCH(value); + version.pretty_string = xe::format_string("%u.%u.%u", version.major, + version.minor, version.patch); + return version; +} + +const char* to_string(VkFormat format) { + switch (format) { +#define STR(r) \ + case r: \ + return #r + STR(VK_FORMAT_UNDEFINED); + STR(VK_FORMAT_R4G4_UNORM_PACK8); + STR(VK_FORMAT_R4G4B4A4_UNORM_PACK16); + STR(VK_FORMAT_B4G4R4A4_UNORM_PACK16); + STR(VK_FORMAT_R5G6B5_UNORM_PACK16); + STR(VK_FORMAT_B5G6R5_UNORM_PACK16); + STR(VK_FORMAT_R5G5B5A1_UNORM_PACK16); + STR(VK_FORMAT_B5G5R5A1_UNORM_PACK16); + STR(VK_FORMAT_A1R5G5B5_UNORM_PACK16); + STR(VK_FORMAT_R8_UNORM); + STR(VK_FORMAT_R8_SNORM); + STR(VK_FORMAT_R8_USCALED); + STR(VK_FORMAT_R8_SSCALED); + STR(VK_FORMAT_R8_UINT); + STR(VK_FORMAT_R8_SINT); + STR(VK_FORMAT_R8_SRGB); + STR(VK_FORMAT_R8G8_UNORM); + STR(VK_FORMAT_R8G8_SNORM); + STR(VK_FORMAT_R8G8_USCALED); + STR(VK_FORMAT_R8G8_SSCALED); + STR(VK_FORMAT_R8G8_UINT); + STR(VK_FORMAT_R8G8_SINT); + STR(VK_FORMAT_R8G8_SRGB); + STR(VK_FORMAT_R8G8B8_UNORM); + STR(VK_FORMAT_R8G8B8_SNORM); + STR(VK_FORMAT_R8G8B8_USCALED); + STR(VK_FORMAT_R8G8B8_SSCALED); + STR(VK_FORMAT_R8G8B8_UINT); + STR(VK_FORMAT_R8G8B8_SINT); + STR(VK_FORMAT_R8G8B8_SRGB); + STR(VK_FORMAT_B8G8R8_UNORM); + STR(VK_FORMAT_B8G8R8_SNORM); + STR(VK_FORMAT_B8G8R8_USCALED); + STR(VK_FORMAT_B8G8R8_SSCALED); + STR(VK_FORMAT_B8G8R8_UINT); + STR(VK_FORMAT_B8G8R8_SINT); + STR(VK_FORMAT_B8G8R8_SRGB); + 
STR(VK_FORMAT_R8G8B8A8_UNORM); + STR(VK_FORMAT_R8G8B8A8_SNORM); + STR(VK_FORMAT_R8G8B8A8_USCALED); + STR(VK_FORMAT_R8G8B8A8_SSCALED); + STR(VK_FORMAT_R8G8B8A8_UINT); + STR(VK_FORMAT_R8G8B8A8_SINT); + STR(VK_FORMAT_R8G8B8A8_SRGB); + STR(VK_FORMAT_B8G8R8A8_UNORM); + STR(VK_FORMAT_B8G8R8A8_SNORM); + STR(VK_FORMAT_B8G8R8A8_USCALED); + STR(VK_FORMAT_B8G8R8A8_SSCALED); + STR(VK_FORMAT_B8G8R8A8_UINT); + STR(VK_FORMAT_B8G8R8A8_SINT); + STR(VK_FORMAT_B8G8R8A8_SRGB); + STR(VK_FORMAT_A8B8G8R8_UNORM_PACK32); + STR(VK_FORMAT_A8B8G8R8_SNORM_PACK32); + STR(VK_FORMAT_A8B8G8R8_USCALED_PACK32); + STR(VK_FORMAT_A8B8G8R8_SSCALED_PACK32); + STR(VK_FORMAT_A8B8G8R8_UINT_PACK32); + STR(VK_FORMAT_A8B8G8R8_SINT_PACK32); + STR(VK_FORMAT_A8B8G8R8_SRGB_PACK32); + STR(VK_FORMAT_A2R10G10B10_UNORM_PACK32); + STR(VK_FORMAT_A2R10G10B10_SNORM_PACK32); + STR(VK_FORMAT_A2R10G10B10_USCALED_PACK32); + STR(VK_FORMAT_A2R10G10B10_SSCALED_PACK32); + STR(VK_FORMAT_A2R10G10B10_UINT_PACK32); + STR(VK_FORMAT_A2R10G10B10_SINT_PACK32); + STR(VK_FORMAT_A2B10G10R10_UNORM_PACK32); + STR(VK_FORMAT_A2B10G10R10_SNORM_PACK32); + STR(VK_FORMAT_A2B10G10R10_USCALED_PACK32); + STR(VK_FORMAT_A2B10G10R10_SSCALED_PACK32); + STR(VK_FORMAT_A2B10G10R10_UINT_PACK32); + STR(VK_FORMAT_A2B10G10R10_SINT_PACK32); + STR(VK_FORMAT_R16_UNORM); + STR(VK_FORMAT_R16_SNORM); + STR(VK_FORMAT_R16_USCALED); + STR(VK_FORMAT_R16_SSCALED); + STR(VK_FORMAT_R16_UINT); + STR(VK_FORMAT_R16_SINT); + STR(VK_FORMAT_R16_SFLOAT); + STR(VK_FORMAT_R16G16_UNORM); + STR(VK_FORMAT_R16G16_SNORM); + STR(VK_FORMAT_R16G16_USCALED); + STR(VK_FORMAT_R16G16_SSCALED); + STR(VK_FORMAT_R16G16_UINT); + STR(VK_FORMAT_R16G16_SINT); + STR(VK_FORMAT_R16G16_SFLOAT); + STR(VK_FORMAT_R16G16B16_UNORM); + STR(VK_FORMAT_R16G16B16_SNORM); + STR(VK_FORMAT_R16G16B16_USCALED); + STR(VK_FORMAT_R16G16B16_SSCALED); + STR(VK_FORMAT_R16G16B16_UINT); + STR(VK_FORMAT_R16G16B16_SINT); + STR(VK_FORMAT_R16G16B16_SFLOAT); + STR(VK_FORMAT_R16G16B16A16_UNORM); + STR(VK_FORMAT_R16G16B16A16_SNORM); + 
STR(VK_FORMAT_R16G16B16A16_USCALED); + STR(VK_FORMAT_R16G16B16A16_SSCALED); + STR(VK_FORMAT_R16G16B16A16_UINT); + STR(VK_FORMAT_R16G16B16A16_SINT); + STR(VK_FORMAT_R16G16B16A16_SFLOAT); + STR(VK_FORMAT_R32_UINT); + STR(VK_FORMAT_R32_SINT); + STR(VK_FORMAT_R32_SFLOAT); + STR(VK_FORMAT_R32G32_UINT); + STR(VK_FORMAT_R32G32_SINT); + STR(VK_FORMAT_R32G32_SFLOAT); + STR(VK_FORMAT_R32G32B32_UINT); + STR(VK_FORMAT_R32G32B32_SINT); + STR(VK_FORMAT_R32G32B32_SFLOAT); + STR(VK_FORMAT_R32G32B32A32_UINT); + STR(VK_FORMAT_R32G32B32A32_SINT); + STR(VK_FORMAT_R32G32B32A32_SFLOAT); + STR(VK_FORMAT_R64_UINT); + STR(VK_FORMAT_R64_SINT); + STR(VK_FORMAT_R64_SFLOAT); + STR(VK_FORMAT_R64G64_UINT); + STR(VK_FORMAT_R64G64_SINT); + STR(VK_FORMAT_R64G64_SFLOAT); + STR(VK_FORMAT_R64G64B64_UINT); + STR(VK_FORMAT_R64G64B64_SINT); + STR(VK_FORMAT_R64G64B64_SFLOAT); + STR(VK_FORMAT_R64G64B64A64_UINT); + STR(VK_FORMAT_R64G64B64A64_SINT); + STR(VK_FORMAT_R64G64B64A64_SFLOAT); + STR(VK_FORMAT_B10G11R11_UFLOAT_PACK32); + STR(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32); + STR(VK_FORMAT_D16_UNORM); + STR(VK_FORMAT_X8_D24_UNORM_PACK32); + STR(VK_FORMAT_D32_SFLOAT); + STR(VK_FORMAT_S8_UINT); + STR(VK_FORMAT_D16_UNORM_S8_UINT); + STR(VK_FORMAT_D24_UNORM_S8_UINT); + STR(VK_FORMAT_D32_SFLOAT_S8_UINT); + STR(VK_FORMAT_BC1_RGB_UNORM_BLOCK); + STR(VK_FORMAT_BC1_RGB_SRGB_BLOCK); + STR(VK_FORMAT_BC1_RGBA_UNORM_BLOCK); + STR(VK_FORMAT_BC1_RGBA_SRGB_BLOCK); + STR(VK_FORMAT_BC2_UNORM_BLOCK); + STR(VK_FORMAT_BC2_SRGB_BLOCK); + STR(VK_FORMAT_BC3_UNORM_BLOCK); + STR(VK_FORMAT_BC3_SRGB_BLOCK); + STR(VK_FORMAT_BC4_UNORM_BLOCK); + STR(VK_FORMAT_BC4_SNORM_BLOCK); + STR(VK_FORMAT_BC5_UNORM_BLOCK); + STR(VK_FORMAT_BC5_SNORM_BLOCK); + STR(VK_FORMAT_BC6H_UFLOAT_BLOCK); + STR(VK_FORMAT_BC6H_SFLOAT_BLOCK); + STR(VK_FORMAT_BC7_UNORM_BLOCK); + STR(VK_FORMAT_BC7_SRGB_BLOCK); + STR(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK); + STR(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK); + STR(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK); + 
STR(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK); + STR(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK); + STR(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK); + STR(VK_FORMAT_EAC_R11_UNORM_BLOCK); + STR(VK_FORMAT_EAC_R11_SNORM_BLOCK); + STR(VK_FORMAT_EAC_R11G11_UNORM_BLOCK); + STR(VK_FORMAT_EAC_R11G11_SNORM_BLOCK); + STR(VK_FORMAT_ASTC_4x4_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_4x4_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_5x4_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_5x4_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_5x5_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_5x5_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_6x5_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_6x5_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_6x6_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_6x6_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_8x5_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_8x5_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_8x6_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_8x6_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_8x8_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_8x8_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_10x5_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_10x5_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_10x6_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_10x6_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_10x8_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_10x8_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_10x10_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_10x10_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_12x10_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_12x10_SRGB_BLOCK); + STR(VK_FORMAT_ASTC_12x12_UNORM_BLOCK); + STR(VK_FORMAT_ASTC_12x12_SRGB_BLOCK); +#undef STR + default: + return "UNKNOWN_FORMAT"; + } +} + +const char* to_string(VkPhysicalDeviceType type) { + switch (type) { +#define STR(r) \ + case r: \ + return #r + STR(VK_PHYSICAL_DEVICE_TYPE_OTHER); + STR(VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU); + STR(VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU); + STR(VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU); + STR(VK_PHYSICAL_DEVICE_TYPE_CPU); +#undef STR + default: + return "UNKNOWN_DEVICE"; + } +} + +const char* to_string(VkSharingMode sharing_mode) { + switch (sharing_mode) { +#define STR(r) \ + case r: \ + return #r + STR(VK_SHARING_MODE_EXCLUSIVE); + 
STR(VK_SHARING_MODE_CONCURRENT); +#undef STR + default: + return "UNKNOWN_SHARING_MODE"; + } +} + +const char* to_string(VkResult result) { + switch (result) { +#define STR(r) \ + case r: \ + return #r + STR(VK_SUCCESS); + STR(VK_NOT_READY); + STR(VK_TIMEOUT); + STR(VK_EVENT_SET); + STR(VK_EVENT_RESET); + STR(VK_INCOMPLETE); + STR(VK_ERROR_OUT_OF_HOST_MEMORY); + STR(VK_ERROR_OUT_OF_DEVICE_MEMORY); + STR(VK_ERROR_INITIALIZATION_FAILED); + STR(VK_ERROR_DEVICE_LOST); + STR(VK_ERROR_MEMORY_MAP_FAILED); + STR(VK_ERROR_LAYER_NOT_PRESENT); + STR(VK_ERROR_EXTENSION_NOT_PRESENT); + STR(VK_ERROR_FEATURE_NOT_PRESENT); + STR(VK_ERROR_INCOMPATIBLE_DRIVER); + STR(VK_ERROR_TOO_MANY_OBJECTS); + STR(VK_ERROR_FORMAT_NOT_SUPPORTED); + STR(VK_ERROR_SURFACE_LOST_KHR); + STR(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR); + STR(VK_SUBOPTIMAL_KHR); + STR(VK_ERROR_OUT_OF_DATE_KHR); + STR(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR); + STR(VK_ERROR_VALIDATION_FAILED_EXT); +#undef STR + default: + return "UNKNOWN_RESULT"; + } +} + +std::string to_flags_string(VkImageUsageFlags flags) { + std::string result; +#define OR_FLAG(f) \ + if (flags & f) { \ + if (!result.empty()) { \ + result += " | "; \ + } \ + result += #f; \ + } + OR_FLAG(VK_IMAGE_USAGE_TRANSFER_SRC_BIT); + OR_FLAG(VK_IMAGE_USAGE_TRANSFER_DST_BIT); + OR_FLAG(VK_IMAGE_USAGE_SAMPLED_BIT); + OR_FLAG(VK_IMAGE_USAGE_STORAGE_BIT); + OR_FLAG(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT); + OR_FLAG(VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT); + OR_FLAG(VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT); + OR_FLAG(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT); +#undef OR_FLAG + return result; +} + +std::string to_flags_string(VkSurfaceTransformFlagBitsKHR flags) { + std::string result; +#define OR_FLAG(f) \ + if (flags & f) { \ + if (!result.empty()) { \ + result += " | "; \ + } \ + result += #f; \ + } + OR_FLAG(VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR); + 
OR_FLAG(VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR); + OR_FLAG(VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR); +#undef OR_FLAG + return result; +} + +const char* to_string(VkColorSpaceKHR color_space) { + switch (color_space) { +#define STR(r) \ + case r: \ + return #r + STR(VK_COLORSPACE_SRGB_NONLINEAR_KHR); +#undef STR + default: + return "UNKNOWN_COLORSPACE"; + } +} + +const char* to_string(VkPresentModeKHR present_mode) { + switch (present_mode) { +#define STR(r) \ + case r: \ + return #r + STR(VK_PRESENT_MODE_IMMEDIATE_KHR); + STR(VK_PRESENT_MODE_MAILBOX_KHR); + STR(VK_PRESENT_MODE_FIFO_KHR); + STR(VK_PRESENT_MODE_FIFO_RELAXED_KHR); +#undef STR + default: + return "UNKNOWN_PRESENT_MODE"; + } +} + +void FatalVulkanError(std::string error) { + xe::FatalError( + error + + "\nEnsure you have the latest drivers for your GPU and that it supports " + "Vulkan. 
See http://xenia.jp/faq/ for more information and a list "
+      "of supported GPUs.");
+}
+
+void CheckResult(VkResult result, const char* action) {
+  if (result) {
+    XELOGE("Vulkan check: %s returned %s", action, to_string(result));
+  }
+  assert_true(result == VK_SUCCESS, action);
+}
+
+std::pair<bool, std::vector<const char*>> CheckRequirements(
+    const std::vector<Requirement>& requirements,
+    const std::vector<LayerInfo>& layer_infos) {
+  bool any_missing = false;
+  std::vector<const char*> enabled_layers;
+  for (auto& requirement : requirements) {
+    bool found = false;
+    for (size_t j = 0; j < layer_infos.size(); ++j) {
+      auto layer_name = layer_infos[j].properties.layerName;
+      auto layer_version =
+          Version::Parse(layer_infos[j].properties.specVersion);
+      if (requirement.name == layer_name) {
+        found = true;
+        if (requirement.min_version > layer_infos[j].properties.specVersion) {
+          if (requirement.is_optional) {
+            XELOGVK("- optional layer %s (%s) version mismatch", layer_name,
+                    layer_version.pretty_string.c_str());
+            continue;
+          }
+          XELOGE("ERROR: required layer %s (%s) version mismatch", layer_name,
+                 layer_version.pretty_string.c_str());
+          any_missing = true;
+          break;
+        }
+        XELOGVK("- enabling layer %s (%s)", layer_name,
+                layer_version.pretty_string.c_str());
+        enabled_layers.push_back(layer_name);
+        break;
+      }
+    }
+    if (!found) {
+      if (requirement.is_optional) {
+        XELOGVK("- optional layer %s not found", requirement.name.c_str());
+      } else {
+        XELOGE("ERROR: required layer %s not found", requirement.name.c_str());
+        any_missing = true;
+      }
+    }
+  }
+  return {!any_missing, enabled_layers};
+}
+
+std::pair<bool, std::vector<const char*>> CheckRequirements(
+    const std::vector<Requirement>& requirements,
+    const std::vector<VkExtensionProperties>& extension_properties) {
+  bool any_missing = false;
+  std::vector<const char*> enabled_extensions;
+  for (auto& requirement : requirements) {
+    bool found = false;
+    for (size_t j = 0; j < extension_properties.size(); ++j) {
+      auto extension_name = extension_properties[j].extensionName;
+      auto extension_version =
+
Version::Parse(extension_properties[j].specVersion); + if (requirement.name == extension_name) { + found = true; + if (requirement.min_version > extension_properties[j].specVersion) { + if (requirement.is_optional) { + XELOGVK("- optional extension %s (%s) version mismatch", + extension_name, extension_version.pretty_string.c_str()); + continue; + } + XELOGE("ERROR: required extension %s (%s) version mismatch", + extension_name, extension_version.pretty_string.c_str()); + any_missing = true; + break; + } + XELOGVK("- enabling extension %s (%s)", extension_name, + extension_version.pretty_string.c_str()); + enabled_extensions.push_back(extension_name); + break; + } + } + if (!found) { + if (requirement.is_optional) { + XELOGVK("- optional extension %s not found", requirement.name.c_str()); + } else { + XELOGE("ERROR: required extension %s not found", + requirement.name.c_str()); + any_missing = true; + } + } + } + return {!any_missing, enabled_extensions}; +} + +} // namespace vulkan +} // namespace ui +} // namespace xe diff --git a/src/xenia/ui/vulkan/vulkan_util.h b/src/xenia/ui/vulkan/vulkan_util.h new file mode 100644 index 000000000..fcf9e4f8f --- /dev/null +++ b/src/xenia/ui/vulkan/vulkan_util.h @@ -0,0 +1,101 @@ +/** + ****************************************************************************** + * Xenia : Xbox 360 Emulator Research Project * + ****************************************************************************** + * Copyright 2016 Ben Vanik. All rights reserved. * + * Released under the BSD license - see LICENSE in the root for more details. 
*
+ ******************************************************************************
+ */
+
+#ifndef XENIA_UI_VULKAN_VULKAN_UTIL_H_
+#define XENIA_UI_VULKAN_VULKAN_UTIL_H_
+
+#include <string>
+#include <vector>
+
+#include "xenia/ui/vulkan/vulkan.h"
+
+namespace xe {
+namespace ui {
+class Window;
+}  // namespace ui
+}  // namespace xe
+
+namespace xe {
+namespace ui {
+namespace vulkan {
+
+struct Version {
+  uint32_t major;
+  uint32_t minor;
+  uint32_t patch;
+  std::string pretty_string;
+
+  static uint32_t Make(uint32_t major, uint32_t minor, uint32_t patch);
+
+  static Version Parse(uint32_t value);
+};
+
+const char* to_string(VkFormat format);
+const char* to_string(VkPhysicalDeviceType type);
+const char* to_string(VkSharingMode sharing_mode);
+const char* to_string(VkResult result);
+
+std::string to_flags_string(VkImageUsageFlags flags);
+std::string to_flags_string(VkSurfaceTransformFlagBitsKHR flags);
+
+const char* to_string(VkColorSpaceKHR color_space);
+const char* to_string(VkPresentModeKHR present_mode);
+
+// Throws a fatal error with some Vulkan help text.
+void FatalVulkanError(std::string error);
+
+// Logs and asserts expecting the result to be VK_SUCCESS.
+void CheckResult(VkResult result, const char* action);
+
+struct LayerInfo {
+  VkLayerProperties properties;
+  std::vector<VkExtensionProperties> extensions;
+};
+
+struct DeviceInfo {
+  VkPhysicalDevice handle;
+  VkPhysicalDeviceProperties properties;
+  VkPhysicalDeviceFeatures features;
+  VkPhysicalDeviceMemoryProperties memory_properties;
+  std::vector<VkQueueFamilyProperties> queue_family_properties;
+  std::vector<VkBool32> queue_family_supports_present;
+  std::vector<LayerInfo> layers;
+  std::vector<VkExtensionProperties> extensions;
+};
+
+// Defines a requirement for a layer or extension, used to both verify and
+// enable them on initialization.
+struct Requirement {
+  // Layer or extension name.
+  std::string name;
+  // Minimum required spec version of the layer or extension.
+  uint32_t min_version;
+  // True if the requirement is optional (will not cause verification to fail).
+  bool is_optional;
+};
+
+// Gets a list of enabled layer names based on the given layer requirements and
+// available layer info.
+// Returns a boolean indicating whether all required layers are present.
+std::pair<bool, std::vector<const char*>> CheckRequirements(
+    const std::vector<Requirement>& requirements,
+    const std::vector<LayerInfo>& layer_infos);
+
+// Gets a list of enabled extension names based on the given extension
+// requirements and available extensions.
+// Returns a boolean indicating whether all required extensions are present.
+std::pair<bool, std::vector<const char*>> CheckRequirements(
+    const std::vector<Requirement>& requirements,
+    const std::vector<VkExtensionProperties>& extension_properties);
+
+}  // namespace vulkan
+}  // namespace ui
+}  // namespace xe
+
+#endif  // XENIA_UI_VULKAN_VULKAN_UTIL_H_
diff --git a/src/xenia/ui/vulkan/vulkan_window_demo.cc b/src/xenia/ui/vulkan/vulkan_window_demo.cc
new file mode 100644
index 000000000..fad5e90e2
--- /dev/null
+++ b/src/xenia/ui/vulkan/vulkan_window_demo.cc
@@ -0,0 +1,30 @@
+/**
+ ******************************************************************************
+ * Xenia : Xbox 360 Emulator Research Project                                 *
+ ******************************************************************************
+ * Copyright 2016 Ben Vanik. All rights reserved.                             *
+ * Released under the BSD license - see LICENSE in the root for more details.
*
+ ******************************************************************************
+ */
+
+#include <memory>
+#include <string>
+
+#include "xenia/base/main.h"
+#include "xenia/ui/vulkan/vulkan_provider.h"
+#include "xenia/ui/window.h"
+
+namespace xe {
+namespace ui {
+
+int window_demo_main(const std::vector<std::wstring>& args);
+
+std::unique_ptr<GraphicsProvider> CreateDemoGraphicsProvider(Window* window) {
+  return xe::ui::vulkan::VulkanProvider::Create(window);
+}
+
+}  // namespace ui
+}  // namespace xe
+
+DEFINE_ENTRY_POINT(L"xenia-ui-window-vulkan-demo",
+                   L"xenia-ui-window-vulkan-demo", xe::ui::window_demo_main);
diff --git a/src/xenia/ui/window_demo.cc b/src/xenia/ui/window_demo.cc
index 75c74931b..454864a92 100644
--- a/src/xenia/ui/window_demo.cc
+++ b/src/xenia/ui/window_demo.cc
@@ -87,11 +87,10 @@ int window_demo_main(const std::vector<std::wstring>& args) {
   });
 
   window->on_closed.AddListener(
-      [&loop, &graphics_provider](xe::ui::UIEvent* e) {
+      [&loop, &window, &graphics_provider](xe::ui::UIEvent* e) {
        loop->Quit();
+        Profiler::Shutdown();
        XELOGI("User-initiated death!");
-        graphics_provider.reset();
-        exit(1);
       });
 
   loop->on_quit.AddListener([&window](xe::ui::UIEvent* e) { window.reset(); });
@@ -116,11 +115,9 @@ int window_demo_main(const std::vector<std::wstring>& args) {
   // Wait until we are exited.
loop->AwaitQuit(); - loop->PostSynchronous([&graphics_provider]() { graphics_provider.reset(); }); window.reset(); loop.reset(); - Profiler::Dump(); - Profiler::Shutdown(); + graphics_provider.reset(); return 0; } diff --git a/third_party/spirv-tools b/third_party/spirv-tools index 224348faf..4d2f2239b 160000 --- a/third_party/spirv-tools +++ b/third_party/spirv-tools @@ -1 +1 @@ -Subproject commit 224348faf0616b5bea635141f4b28ee0faab3002 +Subproject commit 4d2f2239bf896dc14127e25011f41ac79d687052 diff --git a/third_party/spirv-tools.lua b/third_party/spirv-tools.lua index 90d0e151a..4218ff08e 100644 --- a/third_party/spirv-tools.lua +++ b/third_party/spirv-tools.lua @@ -16,9 +16,7 @@ project("spirv-tools") "spirv-tools/external/include/headers/GLSL.std.450.h", "spirv-tools/external/include/headers/OpenCL.std.h", "spirv-tools/external/include/headers/spirv.h", - "spirv-tools/include/libspirv/libspirv.h", - "spirv-tools/include/util/bitutils.h", - "spirv-tools/include/util/hex_float.h", + "spirv-tools/include/spirv-tools/libspirv.h", "spirv-tools/source/assembly_grammar.cpp", "spirv-tools/source/assembly_grammar.h", "spirv-tools/source/binary.cpp", @@ -26,10 +24,9 @@ project("spirv-tools") "spirv-tools/source/diagnostic.cpp", "spirv-tools/source/diagnostic.h", "spirv-tools/source/disassemble.cpp", - "spirv-tools/source/endian.cpp", - "spirv-tools/source/endian.h", "spirv-tools/source/ext_inst.cpp", "spirv-tools/source/ext_inst.h", + "spirv-tools/source/instruction.cpp", "spirv-tools/source/instruction.h", "spirv-tools/source/opcode.cpp", "spirv-tools/source/opcode.h", @@ -41,6 +38,8 @@ project("spirv-tools") "spirv-tools/source/print.h", "spirv-tools/source/spirv_constant.h", "spirv-tools/source/spirv_definition.h", + "spirv-tools/source/spirv_endian.cpp", + "spirv-tools/source/spirv_endian.h", "spirv-tools/source/spirv_operands.h", "spirv-tools/source/table.cpp", "spirv-tools/source/table.h", @@ -50,5 +49,13 @@ project("spirv-tools") 
"spirv-tools/source/text_handler.h", "spirv-tools/source/validate.cpp", "spirv-tools/source/validate.h", + "spirv-tools/source/validate_cfg.cpp", "spirv-tools/source/validate_id.cpp", + "spirv-tools/source/validate_instruction.cpp", + "spirv-tools/source/validate_layout.cpp", + "spirv-tools/source/validate_passes.h", + "spirv-tools/source/validate_ssa.cpp", + "spirv-tools/source/validate_types.cpp", + "spirv-tools/source/util/bitutils.h", + "spirv-tools/source/util/hex_float.h", }) diff --git a/third_party/spirv/GLSL.std.450.h b/third_party/spirv/GLSL.std.450.h index ed6f8b671..df31092be 100644 --- a/third_party/spirv/GLSL.std.450.h +++ b/third_party/spirv/GLSL.std.450.h @@ -1,5 +1,5 @@ /* -** Copyright (c) 2014-2015 The Khronos Group Inc. +** Copyright (c) 2014-2016 The Khronos Group Inc. ** ** Permission is hereby granted, free of charge, to any person obtaining a copy ** of this software and/or associated documentation files (the "Materials"), @@ -13,7 +13,7 @@ ** ** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS ** STANDARDS. 
THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ ** ** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, @@ -27,105 +27,105 @@ #ifndef GLSLstd450_H #define GLSLstd450_H -namespace spv { +static const int GLSLstd450Version = 100; +static const int GLSLstd450Revision = 1; -const int GLSLstd450Version = 99; -const int GLSLstd450Revision = 3; +enum GLSLstd450 { + GLSLstd450Bad = 0, // Don't use -enum class GLSLstd450 : unsigned { - Bad = 0, // Don't use + GLSLstd450Round = 1, + GLSLstd450RoundEven = 2, + GLSLstd450Trunc = 3, + GLSLstd450FAbs = 4, + GLSLstd450SAbs = 5, + GLSLstd450FSign = 6, + GLSLstd450SSign = 7, + GLSLstd450Floor = 8, + GLSLstd450Ceil = 9, + GLSLstd450Fract = 10, - Round = 1, - RoundEven = 2, - Trunc = 3, - FAbs = 4, - SAbs = 5, - FSign = 6, - SSign = 7, - Floor = 8, - Ceil = 9, - Fract = 10, + GLSLstd450Radians = 11, + GLSLstd450Degrees = 12, + GLSLstd450Sin = 13, + GLSLstd450Cos = 14, + GLSLstd450Tan = 15, + GLSLstd450Asin = 16, + GLSLstd450Acos = 17, + GLSLstd450Atan = 18, + GLSLstd450Sinh = 19, + GLSLstd450Cosh = 20, + GLSLstd450Tanh = 21, + GLSLstd450Asinh = 22, + GLSLstd450Acosh = 23, + GLSLstd450Atanh = 24, + GLSLstd450Atan2 = 25, - Radians = 11, - Degrees = 12, - Sin = 13, - Cos = 14, - Tan = 15, - Asin = 16, - Acos = 17, - Atan = 18, - Sinh = 19, - Cosh = 20, - Tanh = 21, - Asinh = 22, - Acosh = 23, - Atanh = 24, - Atan2 = 25, + GLSLstd450Pow = 26, + GLSLstd450Exp = 27, + GLSLstd450Log = 28, + GLSLstd450Exp2 = 29, + GLSLstd450Log2 = 30, + GLSLstd450Sqrt = 31, + GLSLstd450InverseSqrt = 32, - Pow = 26, - Exp = 27, - Log = 28, - Exp2 = 29, - Log2 = 30, - Sqrt = 31, - InverseSqrt = 32, + GLSLstd450Determinant = 33, + GLSLstd450MatrixInverse = 34, - Determinant = 33, - MatrixInverse = 34, 
+ GLSLstd450Modf = 35, // second operand needs an OpVariable to write to + GLSLstd450ModfStruct = 36, // no OpVariable operand + GLSLstd450FMin = 37, + GLSLstd450UMin = 38, + GLSLstd450SMin = 39, + GLSLstd450FMax = 40, + GLSLstd450UMax = 41, + GLSLstd450SMax = 42, + GLSLstd450FClamp = 43, + GLSLstd450UClamp = 44, + GLSLstd450SClamp = 45, + GLSLstd450FMix = 46, + GLSLstd450IMix = 47, // Reserved + GLSLstd450Step = 48, + GLSLstd450SmoothStep = 49, - Modf = 35, // second operand needs an OpVariable to write to - ModfStruct = 36, // no OpVariable operand - FMin = 37, - UMin = 38, - SMin = 39, - FMax = 40, - UMax = 41, - SMax = 42, - FClamp = 43, - UClamp = 44, - SClamp = 45, - FMix = 46, - IMix = 47, - Step = 48, - SmoothStep = 49, + GLSLstd450Fma = 50, + GLSLstd450Frexp = 51, // second operand needs an OpVariable to write to + GLSLstd450FrexpStruct = 52, // no OpVariable operand + GLSLstd450Ldexp = 53, - Fma = 50, - Frexp = 51, // second operand needs an OpVariable to write to - FrexpStruct = 52, // no OpVariable operand - Ldexp = 53, + GLSLstd450PackSnorm4x8 = 54, + GLSLstd450PackUnorm4x8 = 55, + GLSLstd450PackSnorm2x16 = 56, + GLSLstd450PackUnorm2x16 = 57, + GLSLstd450PackHalf2x16 = 58, + GLSLstd450PackDouble2x32 = 59, + GLSLstd450UnpackSnorm2x16 = 60, + GLSLstd450UnpackUnorm2x16 = 61, + GLSLstd450UnpackHalf2x16 = 62, + GLSLstd450UnpackSnorm4x8 = 63, + GLSLstd450UnpackUnorm4x8 = 64, + GLSLstd450UnpackDouble2x32 = 65, - PackSnorm4x8 = 54, - PackUnorm4x8 = 55, - PackSnorm2x16 = 56, - PackUnorm2x16 = 57, - PackHalf2x16 = 58, - PackDouble2x32 = 59, - UnpackSnorm2x16 = 60, - UnpackUnorm2x16 = 61, - UnpackHalf2x16 = 62, - UnpackSnorm4x8 = 63, - UnpackUnorm4x8 = 64, - UnpackDouble2x32 = 65, + GLSLstd450Length = 66, + GLSLstd450Distance = 67, + GLSLstd450Cross = 68, + GLSLstd450Normalize = 69, + GLSLstd450FaceForward = 70, + GLSLstd450Reflect = 71, + GLSLstd450Refract = 72, - Length = 66, - Distance = 67, - Cross = 68, - Normalize = 69, - FaceForward = 70, - Reflect = 71, - 
Refract = 72, + GLSLstd450FindILsb = 73, + GLSLstd450FindSMsb = 74, + GLSLstd450FindUMsb = 75, - FindILsb = 73, - FindSMsb = 74, - FindUMsb = 75, + GLSLstd450InterpolateAtCentroid = 76, + GLSLstd450InterpolateAtSample = 77, + GLSLstd450InterpolateAtOffset = 78, - InterpolateAtCentroid = 76, - InterpolateAtSample = 77, - InterpolateAtOffset = 78, + GLSLstd450NMin = 79, + GLSLstd450NMax = 80, + GLSLstd450NClamp = 81, - Count + GLSLstd450Count }; -} // end namespace spv - #endif // #ifndef GLSLstd450_H diff --git a/third_party/spirv/GLSL.std.450.hpp11 b/third_party/spirv/GLSL.std.450.hpp11 new file mode 100644 index 000000000..526912006 --- /dev/null +++ b/third_party/spirv/GLSL.std.450.hpp11 @@ -0,0 +1,135 @@ +/* +** Copyright (c) 2014-2016 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a copy +** of this software and/or associated documentation files (the "Materials"), +** to deal in the Materials without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Materials, and to permit persons to whom the +** Materials are furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Materials. +** +** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +** IN THE MATERIALS. +*/ + +#ifndef GLSLstd450_HPP +#define GLSLstd450_HPP + +namespace spv { + +constexpr int GLSLstd450Version = 100; +constexpr int GLSLstd450Revision = 1; + +enum class GLSLstd450 { + kBad = 0, // Don't use + + kRound = 1, + kRoundEven = 2, + kTrunc = 3, + kFAbs = 4, + kSAbs = 5, + kFSign = 6, + kSSign = 7, + kFloor = 8, + kCeil = 9, + kFract = 10, + + kRadians = 11, + kDegrees = 12, + kSin = 13, + kCos = 14, + kTan = 15, + kAsin = 16, + kAcos = 17, + kAtan = 18, + kSinh = 19, + kCosh = 20, + kTanh = 21, + kAsinh = 22, + kAcosh = 23, + kAtanh = 24, + kAtan2 = 25, + + kPow = 26, + kExp = 27, + kLog = 28, + kExp2 = 29, + kLog2 = 30, + kSqrt = 31, + kInverseSqrt = 32, + + kDeterminant = 33, + kMatrixInverse = 34, + + kModf = 35, // second operand needs an OpVariable to write to + kModfStruct = 36, // no OpVariable operand + kFMin = 37, + kUMin = 38, + kSMin = 39, + kFMax = 40, + kUMax = 41, + kSMax = 42, + kFClamp = 43, + kUClamp = 44, + kSClamp = 45, + kFMix = 46, + kIMix = 47, // Reserved + kStep = 48, + kSmoothStep = 49, + + kFma = 50, + kFrexp = 51, // second operand needs an OpVariable to write to + kFrexpStruct = 52, // no OpVariable operand + kLdexp = 53, + + kPackSnorm4x8 = 54, + kPackUnorm4x8 = 55, + kPackSnorm2x16 = 56, + kPackUnorm2x16 = 57, + kPackHalf2x16 = 58, + kPackDouble2x32 = 59, + kUnpackSnorm2x16 = 60, + kUnpackUnorm2x16 = 61, + kUnpackHalf2x16 = 62, + kUnpackSnorm4x8 = 63, + kUnpackUnorm4x8 = 64, + kUnpackDouble2x32 = 65, + + kLength = 66, + kDistance = 67, + kCross = 68, + kNormalize = 69, + kFaceForward = 70, + kReflect = 71, + kRefract = 72, + + kFindILsb = 73, + kFindSMsb = 74, + kFindUMsb = 75, + + kInterpolateAtCentroid = 76, + kInterpolateAtSample = 77, + 
kInterpolateAtOffset = 78, + + kNMin = 79, + kNMax = 80, + kNClamp = 81, + + kCount +}; + +} // namespace spv + +#endif // #ifndef GLSLstd450_HPP diff --git a/third_party/spirv/OpenCL.std.h b/third_party/spirv/OpenCL.std.h new file mode 100644 index 000000000..af29c527e --- /dev/null +++ b/third_party/spirv/OpenCL.std.h @@ -0,0 +1,272 @@ +/* +** Copyright (c) 2015-2016 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a copy +** of this software and/or associated documentation files (the "Materials"), +** to deal in the Materials without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Materials, and to permit persons to whom the +** Materials are furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Materials. +** +** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +** IN THE MATERIALS. 
+*/ + +// +// Author: Boaz Ouriel, Intel +// + +namespace OpenCLLIB { + +enum Entrypoints { + + // math functions + Acos = 0, + Acosh = 1, + Acospi = 2, + Asin = 3, + Asinh = 4, + Asinpi = 5, + Atan = 6, + Atan2 = 7, + Atanh = 8, + Atanpi = 9, + Atan2pi = 10, + Cbrt = 11, + Ceil = 12, + Copysign = 13, + Cos = 14, + Cosh = 15, + Cospi = 16, + Erfc = 17, + Erf = 18, + Exp = 19, + Exp2 = 20, + Exp10 = 21, + Expm1 = 22, + Fabs = 23, + Fdim = 24, + Floor = 25, + Fma = 26, + Fmax = 27, + Fmin = 28, + Fmod = 29, + Fract = 30, + Frexp = 31, + Hypot = 32, + Ilogb = 33, + Ldexp = 34, + Lgamma = 35, + Lgamma_r = 36, + Log = 37, + Log2 = 38, + Log10 = 39, + Log1p = 40, + Logb = 41, + Mad = 42, + Maxmag = 43, + Minmag = 44, + Modf = 45, + Nan = 46, + Nextafter = 47, + Pow = 48, + Pown = 49, + Powr = 50, + Remainder = 51, + Remquo = 52, + Rint = 53, + Rootn = 54, + Round = 55, + Rsqrt = 56, + Sin = 57, + Sincos = 58, + Sinh = 59, + Sinpi = 60, + Sqrt = 61, + Tan = 62, + Tanh = 63, + Tanpi = 64, + Tgamma = 65, + Trunc = 66, + Half_cos = 67, + Half_divide = 68, + Half_exp = 69, + Half_exp2 = 70, + Half_exp10 = 71, + Half_log = 72, + Half_log2 = 73, + Half_log10 = 74, + Half_powr = 75, + Half_recip = 76, + Half_rsqrt = 77, + Half_sin = 78, + Half_sqrt = 79, + Half_tan = 80, + Native_cos = 81, + Native_divide = 82, + Native_exp = 83, + Native_exp2 = 84, + Native_exp10 = 85, + Native_log = 86, + Native_log2 = 87, + Native_log10 = 88, + Native_powr = 89, + Native_recip = 90, + Native_rsqrt = 91, + Native_sin = 92, + Native_sqrt = 93, + Native_tan = 94, + + // Common + FClamp = 95, + Degrees = 96, + FMax_common = 97, + FMin_common = 98, + Mix = 99, + Radians = 100, + Step = 101, + Smoothstep = 102, + Sign = 103, + + // Geometrics + Cross = 104, + Distance = 105, + Length = 106, + Normalize = 107, + Fast_distance = 108, + Fast_length = 109, + Fast_normalize = 110, + + // Images - Deprecated + Read_imagef = 111, + Read_imagei = 112, + Read_imageui = 113, + Read_imageh = 114, + + 
Read_imagef_samplerless = 115, + Read_imagei_samplerless = 116, + Read_imageui_samplerless = 117, + Read_imageh_samplerless = 118, + + Write_imagef = 119, + Write_imagei = 120, + Write_imageui = 121, + Write_imageh = 122, + Read_imagef_mipmap_lod = 123, + Read_imagei_mipmap_lod = 124, + Read_imageui_mipmap_lod = 125, + Read_imagef_mipmap_grad = 126, + Read_imagei_mipmap_grad = 127, + Read_imageui_mipmap_grad = 128, + + // Image write with LOD + Write_imagef_mipmap_lod = 129, + Write_imagei_mipmap_lod = 130, + Write_imageui_mipmap_lod = 131, + + // Images - Deprecated + Get_image_width = 132, + Get_image_height = 133, + Get_image_depth = 134, + Get_image_channel_data_type = 135, + Get_image_channel_order = 136, + Get_image_dim = 137, + Get_image_array_size = 138, + Get_image_num_samples = 139, + Get_image_num_mip_levels = 140, + + // Integers + SAbs = 141, + SAbs_diff = 142, + SAdd_sat = 143, + UAdd_sat = 144, + SHadd = 145, + UHadd = 146, + SRhadd = 147, + URhadd = 148, + SClamp = 149, + UClamp = 150, + Clz = 151, + Ctz = 152, + SMad_hi = 153, + UMad_sat = 154, + SMad_sat = 155, + SMax = 156, + UMax = 157, + SMin = 158, + UMin = 159, + SMul_hi = 160, + Rotate = 161, + SSub_sat = 162, + USub_sat = 163, + U_Upsample = 164, + S_Upsample = 165, + Popcount = 166, + SMad24 = 167, + UMad24 = 168, + SMul24 = 169, + UMul24 = 170, + + // Vector Loads/Stores + Vloadn = 171, + Vstoren = 172, + Vload_half = 173, + Vload_halfn = 174, + Vstore_half = 175, + Vstore_half_r = 176, + Vstore_halfn = 177, + Vstore_halfn_r = 178, + Vloada_halfn = 179, + Vstorea_halfn = 180, + Vstorea_halfn_r = 181, + + // Vector Misc + Shuffle = 182, + Shuffle2 = 183, + + // + Printf = 184, + Prefetch = 185, + + // Relationals + Bitselect = 186, + Select = 187, + + // pipes + Read_pipe = 188, + Write_pipe = 189, + Reserve_read_pipe = 190, + Reserve_write_pipe = 191, + Commit_read_pipe = 192, + Commit_write_pipe = 193, + Is_valid_reserve_id = 194, + Work_group_reserve_read_pipe = 195, + 
Work_group_reserve_write_pipe = 196, + Work_group_commit_read_pipe = 197, + Work_group_commit_write_pipe = 198, + Get_pipe_num_packets = 199, + Get_pipe_max_packets = 200, + + // more integers + UAbs = 201, + UAbs_diff = 202, + UMul_hi = 203, + UMad_hi = 204, +}; + + + +}; // end namespace OpenCL20 + diff --git a/third_party/spirv/spirv.h b/third_party/spirv/spirv.h index 136121600..d48488e94 100644 --- a/third_party/spirv/spirv.h +++ b/third_party/spirv/spirv.h @@ -1,877 +1,871 @@ -// Copyright (c) 2014-2015 The Khronos Group Inc. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and/or associated documentation files (the "Materials"), -// to deal in the Materials without restriction, including without limitation -// the rights to use, copy, modify, merge, publish, distribute, sublicense, -// and/or sell copies of the Materials, and to permit persons to whom the -// Materials are furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Materials. -// -// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS -// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND -// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ -// -// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS -// IN THE MATERIALS. +/* +** Copyright (c) 2014-2016 The Khronos Group Inc. 
+** +** Permission is hereby granted, free of charge, to any person obtaining a copy +** of this software and/or associated documentation files (the "Materials"), +** to deal in the Materials without restriction, including without limitation +** the rights to use, copy, modify, merge, publish, distribute, sublicense, +** and/or sell copies of the Materials, and to permit persons to whom the +** Materials are furnished to do so, subject to the following conditions: +** +** The above copyright notice and this permission notice shall be included in +** all copies or substantial portions of the Materials. +** +** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +** IN THE MATERIALS. +*/ -// This header is automatically generated by the same tool that creates -// the Binary Section of the SPIR-V specification. +/* +** This header is automatically generated by the same tool that creates +** the Binary Section of the SPIR-V specification. 
+*/ -// Enumeration tokens for SPIR-V, in various styles: -// C, C++, C++11, JSON, Lua, Python -// -// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL -// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL -// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL -// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL -// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] -// -// Some tokens act like mask values, which can be OR'd together, -// while others are mutually exclusive. The mask-like ones have -// "Mask" in their name, and a parallel enum that has the shift -// amount (1 << x) for each corresponding enumerant. +/* +** Enumeration tokens for SPIR-V, in various styles: +** C, C++, C++11, JSON, Lua, Python +** +** - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +** - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +** - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +** - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +** - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +** +** Some tokens act like mask values, which can be OR'd together, +** while others are mutually exclusive. The mask-like ones have +** "Mask" in their name, and a parallel enum that has the shift +** amount (1 << x) for each corresponding enumerant. 
+*/ -#ifndef spirv_H11 -#define spirv_H11 +#ifndef spirv_H +#define spirv_H -namespace spv { - -typedef unsigned int Id; +typedef unsigned int SpvId; #define SPV_VERSION 0x10000 -#define SPV_REVISION 2 +#define SPV_REVISION 3 -static const unsigned int MagicNumber = 0x07230203; -static const unsigned int Version = 0x00010000; -static const unsigned int Revision = 2; -static const unsigned int OpCodeMask = 0xffff; -static const unsigned int WordCountShift = 16; +static const unsigned int SpvMagicNumber = 0x07230203; +static const unsigned int SpvVersion = 0x00010000; +static const unsigned int SpvRevision = 3; +static const unsigned int SpvOpCodeMask = 0xffff; +static const unsigned int SpvWordCountShift = 16; -enum class SourceLanguage : unsigned { - Unknown = 0, - ESSL = 1, - GLSL = 2, - OpenCL_C = 3, - OpenCL_CPP = 4, -}; +typedef enum SpvSourceLanguage_ { + SpvSourceLanguageUnknown = 0, + SpvSourceLanguageESSL = 1, + SpvSourceLanguageGLSL = 2, + SpvSourceLanguageOpenCL_C = 3, + SpvSourceLanguageOpenCL_CPP = 4, +} SpvSourceLanguage; -enum class ExecutionModel : unsigned { - Vertex = 0, - TessellationControl = 1, - TessellationEvaluation = 2, - Geometry = 3, - Fragment = 4, - GLCompute = 5, - Kernel = 6, -}; +typedef enum SpvExecutionModel_ { + SpvExecutionModelVertex = 0, + SpvExecutionModelTessellationControl = 1, + SpvExecutionModelTessellationEvaluation = 2, + SpvExecutionModelGeometry = 3, + SpvExecutionModelFragment = 4, + SpvExecutionModelGLCompute = 5, + SpvExecutionModelKernel = 6, +} SpvExecutionModel; -enum class AddressingModel : unsigned { - Logical = 0, - Physical32 = 1, - Physical64 = 2, -}; +typedef enum SpvAddressingModel_ { + SpvAddressingModelLogical = 0, + SpvAddressingModelPhysical32 = 1, + SpvAddressingModelPhysical64 = 2, +} SpvAddressingModel; -enum class MemoryModel : unsigned { - Simple = 0, - GLSL450 = 1, - OpenCL = 2, -}; +typedef enum SpvMemoryModel_ { + SpvMemoryModelSimple = 0, + SpvMemoryModelGLSL450 = 1, + SpvMemoryModelOpenCL = 2, 
+} SpvMemoryModel; -enum class ExecutionMode : unsigned { - Invocations = 0, - SpacingEqual = 1, - SpacingFractionalEven = 2, - SpacingFractionalOdd = 3, - VertexOrderCw = 4, - VertexOrderCcw = 5, - PixelCenterInteger = 6, - OriginUpperLeft = 7, - OriginLowerLeft = 8, - EarlyFragmentTests = 9, - PointMode = 10, - Xfb = 11, - DepthReplacing = 12, - DepthGreater = 14, - DepthLess = 15, - DepthUnchanged = 16, - LocalSize = 17, - LocalSizeHint = 18, - InputPoints = 19, - InputLines = 20, - InputLinesAdjacency = 21, - Triangles = 22, - InputTrianglesAdjacency = 23, - Quads = 24, - Isolines = 25, - OutputVertices = 26, - OutputPoints = 27, - OutputLineStrip = 28, - OutputTriangleStrip = 29, - VecTypeHint = 30, - ContractionOff = 31, -}; +typedef enum SpvExecutionMode_ { + SpvExecutionModeInvocations = 0, + SpvExecutionModeSpacingEqual = 1, + SpvExecutionModeSpacingFractionalEven = 2, + SpvExecutionModeSpacingFractionalOdd = 3, + SpvExecutionModeVertexOrderCw = 4, + SpvExecutionModeVertexOrderCcw = 5, + SpvExecutionModePixelCenterInteger = 6, + SpvExecutionModeOriginUpperLeft = 7, + SpvExecutionModeOriginLowerLeft = 8, + SpvExecutionModeEarlyFragmentTests = 9, + SpvExecutionModePointMode = 10, + SpvExecutionModeXfb = 11, + SpvExecutionModeDepthReplacing = 12, + SpvExecutionModeDepthGreater = 14, + SpvExecutionModeDepthLess = 15, + SpvExecutionModeDepthUnchanged = 16, + SpvExecutionModeLocalSize = 17, + SpvExecutionModeLocalSizeHint = 18, + SpvExecutionModeInputPoints = 19, + SpvExecutionModeInputLines = 20, + SpvExecutionModeInputLinesAdjacency = 21, + SpvExecutionModeTriangles = 22, + SpvExecutionModeInputTrianglesAdjacency = 23, + SpvExecutionModeQuads = 24, + SpvExecutionModeIsolines = 25, + SpvExecutionModeOutputVertices = 26, + SpvExecutionModeOutputPoints = 27, + SpvExecutionModeOutputLineStrip = 28, + SpvExecutionModeOutputTriangleStrip = 29, + SpvExecutionModeVecTypeHint = 30, + SpvExecutionModeContractionOff = 31, +} SpvExecutionMode; -enum class StorageClass : 
unsigned { - UniformConstant = 0, - Input = 1, - Uniform = 2, - Output = 3, - Workgroup = 4, - CrossWorkgroup = 5, - Private = 6, - Function = 7, - Generic = 8, - PushConstant = 9, - AtomicCounter = 10, - Image = 11, -}; +typedef enum SpvStorageClass_ { + SpvStorageClassUniformConstant = 0, + SpvStorageClassInput = 1, + SpvStorageClassUniform = 2, + SpvStorageClassOutput = 3, + SpvStorageClassWorkgroup = 4, + SpvStorageClassCrossWorkgroup = 5, + SpvStorageClassPrivate = 6, + SpvStorageClassFunction = 7, + SpvStorageClassGeneric = 8, + SpvStorageClassPushConstant = 9, + SpvStorageClassAtomicCounter = 10, + SpvStorageClassImage = 11, +} SpvStorageClass; -enum class Dim : unsigned { - Dim1D = 0, - Dim2D = 1, - Dim3D = 2, - Cube = 3, - Rect = 4, - Buffer = 5, - SubpassData = 6, -}; +typedef enum SpvDim_ { + SpvDim1D = 0, + SpvDim2D = 1, + SpvDim3D = 2, + SpvDimCube = 3, + SpvDimRect = 4, + SpvDimBuffer = 5, + SpvDimSubpassData = 6, +} SpvDim; -enum class SamplerAddressingMode : unsigned { - None = 0, - ClampToEdge = 1, - Clamp = 2, - Repeat = 3, - RepeatMirrored = 4, -}; +typedef enum SpvSamplerAddressingMode_ { + SpvSamplerAddressingModeNone = 0, + SpvSamplerAddressingModeClampToEdge = 1, + SpvSamplerAddressingModeClamp = 2, + SpvSamplerAddressingModeRepeat = 3, + SpvSamplerAddressingModeRepeatMirrored = 4, +} SpvSamplerAddressingMode; -enum class SamplerFilterMode : unsigned { - Nearest = 0, - Linear = 1, -}; +typedef enum SpvSamplerFilterMode_ { + SpvSamplerFilterModeNearest = 0, + SpvSamplerFilterModeLinear = 1, +} SpvSamplerFilterMode; -enum class ImageFormat : unsigned { - Unknown = 0, - Rgba32f = 1, - Rgba16f = 2, - R32f = 3, - Rgba8 = 4, - Rgba8Snorm = 5, - Rg32f = 6, - Rg16f = 7, - R11fG11fB10f = 8, - R16f = 9, - Rgba16 = 10, - Rgb10A2 = 11, - Rg16 = 12, - Rg8 = 13, - R16 = 14, - R8 = 15, - Rgba16Snorm = 16, - Rg16Snorm = 17, - Rg8Snorm = 18, - R16Snorm = 19, - R8Snorm = 20, - Rgba32i = 21, - Rgba16i = 22, - Rgba8i = 23, - R32i = 24, - Rg32i = 25, - Rg16i = 
26, - Rg8i = 27, - R16i = 28, - R8i = 29, - Rgba32ui = 30, - Rgba16ui = 31, - Rgba8ui = 32, - R32ui = 33, - Rgb10a2ui = 34, - Rg32ui = 35, - Rg16ui = 36, - Rg8ui = 37, - R16ui = 38, - R8ui = 39, -}; +typedef enum SpvImageFormat_ { + SpvImageFormatUnknown = 0, + SpvImageFormatRgba32f = 1, + SpvImageFormatRgba16f = 2, + SpvImageFormatR32f = 3, + SpvImageFormatRgba8 = 4, + SpvImageFormatRgba8Snorm = 5, + SpvImageFormatRg32f = 6, + SpvImageFormatRg16f = 7, + SpvImageFormatR11fG11fB10f = 8, + SpvImageFormatR16f = 9, + SpvImageFormatRgba16 = 10, + SpvImageFormatRgb10A2 = 11, + SpvImageFormatRg16 = 12, + SpvImageFormatRg8 = 13, + SpvImageFormatR16 = 14, + SpvImageFormatR8 = 15, + SpvImageFormatRgba16Snorm = 16, + SpvImageFormatRg16Snorm = 17, + SpvImageFormatRg8Snorm = 18, + SpvImageFormatR16Snorm = 19, + SpvImageFormatR8Snorm = 20, + SpvImageFormatRgba32i = 21, + SpvImageFormatRgba16i = 22, + SpvImageFormatRgba8i = 23, + SpvImageFormatR32i = 24, + SpvImageFormatRg32i = 25, + SpvImageFormatRg16i = 26, + SpvImageFormatRg8i = 27, + SpvImageFormatR16i = 28, + SpvImageFormatR8i = 29, + SpvImageFormatRgba32ui = 30, + SpvImageFormatRgba16ui = 31, + SpvImageFormatRgba8ui = 32, + SpvImageFormatR32ui = 33, + SpvImageFormatRgb10a2ui = 34, + SpvImageFormatRg32ui = 35, + SpvImageFormatRg16ui = 36, + SpvImageFormatRg8ui = 37, + SpvImageFormatR16ui = 38, + SpvImageFormatR8ui = 39, +} SpvImageFormat; -enum class ImageChannelOrder : unsigned { - R = 0, - A = 1, - RG = 2, - RA = 3, - RGB = 4, - RGBA = 5, - BGRA = 6, - ARGB = 7, - Intensity = 8, - Luminance = 9, - Rx = 10, - RGx = 11, - RGBx = 12, - Depth = 13, - DepthStencil = 14, - sRGB = 15, - sRGBx = 16, - sRGBA = 17, - sBGRA = 18, -}; +typedef enum SpvImageChannelOrder_ { + SpvImageChannelOrderR = 0, + SpvImageChannelOrderA = 1, + SpvImageChannelOrderRG = 2, + SpvImageChannelOrderRA = 3, + SpvImageChannelOrderRGB = 4, + SpvImageChannelOrderRGBA = 5, + SpvImageChannelOrderBGRA = 6, + SpvImageChannelOrderARGB = 7, + 
SpvImageChannelOrderIntensity = 8, + SpvImageChannelOrderLuminance = 9, + SpvImageChannelOrderRx = 10, + SpvImageChannelOrderRGx = 11, + SpvImageChannelOrderRGBx = 12, + SpvImageChannelOrderDepth = 13, + SpvImageChannelOrderDepthStencil = 14, + SpvImageChannelOrdersRGB = 15, + SpvImageChannelOrdersRGBx = 16, + SpvImageChannelOrdersRGBA = 17, + SpvImageChannelOrdersBGRA = 18, +} SpvImageChannelOrder; -enum class ImageChannelDataType : unsigned { - SnormInt8 = 0, - SnormInt16 = 1, - UnormInt8 = 2, - UnormInt16 = 3, - UnormShort565 = 4, - UnormShort555 = 5, - UnormInt101010 = 6, - SignedInt8 = 7, - SignedInt16 = 8, - SignedInt32 = 9, - UnsignedInt8 = 10, - UnsignedInt16 = 11, - UnsignedInt32 = 12, - HalfFloat = 13, - Float = 14, - UnormInt24 = 15, - UnormInt101010_2 = 16, -}; +typedef enum SpvImageChannelDataType_ { + SpvImageChannelDataTypeSnormInt8 = 0, + SpvImageChannelDataTypeSnormInt16 = 1, + SpvImageChannelDataTypeUnormInt8 = 2, + SpvImageChannelDataTypeUnormInt16 = 3, + SpvImageChannelDataTypeUnormShort565 = 4, + SpvImageChannelDataTypeUnormShort555 = 5, + SpvImageChannelDataTypeUnormInt101010 = 6, + SpvImageChannelDataTypeSignedInt8 = 7, + SpvImageChannelDataTypeSignedInt16 = 8, + SpvImageChannelDataTypeSignedInt32 = 9, + SpvImageChannelDataTypeUnsignedInt8 = 10, + SpvImageChannelDataTypeUnsignedInt16 = 11, + SpvImageChannelDataTypeUnsignedInt32 = 12, + SpvImageChannelDataTypeHalfFloat = 13, + SpvImageChannelDataTypeFloat = 14, + SpvImageChannelDataTypeUnormInt24 = 15, + SpvImageChannelDataTypeUnormInt101010_2 = 16, +} SpvImageChannelDataType; -enum class ImageOperandsShift : unsigned { - Bias = 0, - Lod = 1, - Grad = 2, - ConstOffset = 3, - Offset = 4, - ConstOffsets = 5, - Sample = 6, - MinLod = 7, -}; +typedef enum SpvImageOperandsShift_ { + SpvImageOperandsBiasShift = 0, + SpvImageOperandsLodShift = 1, + SpvImageOperandsGradShift = 2, + SpvImageOperandsConstOffsetShift = 3, + SpvImageOperandsOffsetShift = 4, + SpvImageOperandsConstOffsetsShift = 5, + 
SpvImageOperandsSampleShift = 6, + SpvImageOperandsMinLodShift = 7, +} SpvImageOperandsShift; -enum class ImageOperandsMask : unsigned { - MaskNone = 0, - Bias = 0x00000001, - Lod = 0x00000002, - Grad = 0x00000004, - ConstOffset = 0x00000008, - Offset = 0x00000010, - ConstOffsets = 0x00000020, - Sample = 0x00000040, - MinLod = 0x00000080, -}; +typedef enum SpvImageOperandsMask_ { + SpvImageOperandsMaskNone = 0, + SpvImageOperandsBiasMask = 0x00000001, + SpvImageOperandsLodMask = 0x00000002, + SpvImageOperandsGradMask = 0x00000004, + SpvImageOperandsConstOffsetMask = 0x00000008, + SpvImageOperandsOffsetMask = 0x00000010, + SpvImageOperandsConstOffsetsMask = 0x00000020, + SpvImageOperandsSampleMask = 0x00000040, + SpvImageOperandsMinLodMask = 0x00000080, +} SpvImageOperandsMask; -enum class FPFastMathModeShift : unsigned { - NotNaN = 0, - NotInf = 1, - NSZ = 2, - AllowRecip = 3, - Fast = 4, -}; +typedef enum SpvFPFastMathModeShift_ { + SpvFPFastMathModeNotNaNShift = 0, + SpvFPFastMathModeNotInfShift = 1, + SpvFPFastMathModeNSZShift = 2, + SpvFPFastMathModeAllowRecipShift = 3, + SpvFPFastMathModeFastShift = 4, +} SpvFPFastMathModeShift; -enum class FPFastMathModeMask : unsigned { - MaskNone = 0, - NotNaN = 0x00000001, - NotInf = 0x00000002, - NSZ = 0x00000004, - AllowRecip = 0x00000008, - Fast = 0x00000010, -}; +typedef enum SpvFPFastMathModeMask_ { + SpvFPFastMathModeMaskNone = 0, + SpvFPFastMathModeNotNaNMask = 0x00000001, + SpvFPFastMathModeNotInfMask = 0x00000002, + SpvFPFastMathModeNSZMask = 0x00000004, + SpvFPFastMathModeAllowRecipMask = 0x00000008, + SpvFPFastMathModeFastMask = 0x00000010, +} SpvFPFastMathModeMask; -enum class FPRoundingMode : unsigned { - RTE = 0, - RTZ = 1, - RTP = 2, - RTN = 3, -}; +typedef enum SpvFPRoundingMode_ { + SpvFPRoundingModeRTE = 0, + SpvFPRoundingModeRTZ = 1, + SpvFPRoundingModeRTP = 2, + SpvFPRoundingModeRTN = 3, +} SpvFPRoundingMode; -enum class LinkageType : unsigned { - Export = 0, - Import = 1, -}; +typedef enum 
SpvLinkageType_ { + SpvLinkageTypeExport = 0, + SpvLinkageTypeImport = 1, +} SpvLinkageType; -enum class AccessQualifier : unsigned { - ReadOnly = 0, - WriteOnly = 1, - ReadWrite = 2, -}; +typedef enum SpvAccessQualifier_ { + SpvAccessQualifierReadOnly = 0, + SpvAccessQualifierWriteOnly = 1, + SpvAccessQualifierReadWrite = 2, +} SpvAccessQualifier; -enum class FunctionParameterAttribute : unsigned { - Zext = 0, - Sext = 1, - ByVal = 2, - Sret = 3, - NoAlias = 4, - NoCapture = 5, - NoWrite = 6, - NoReadWrite = 7, -}; +typedef enum SpvFunctionParameterAttribute_ { + SpvFunctionParameterAttributeZext = 0, + SpvFunctionParameterAttributeSext = 1, + SpvFunctionParameterAttributeByVal = 2, + SpvFunctionParameterAttributeSret = 3, + SpvFunctionParameterAttributeNoAlias = 4, + SpvFunctionParameterAttributeNoCapture = 5, + SpvFunctionParameterAttributeNoWrite = 6, + SpvFunctionParameterAttributeNoReadWrite = 7, +} SpvFunctionParameterAttribute; -enum class Decoration : unsigned { - RelaxedPrecision = 0, - SpecId = 1, - Block = 2, - BufferBlock = 3, - RowMajor = 4, - ColMajor = 5, - ArrayStride = 6, - MatrixStride = 7, - GLSLShared = 8, - GLSLPacked = 9, - CPacked = 10, - BuiltIn = 11, - NoPerspective = 13, - Flat = 14, - Patch = 15, - Centroid = 16, - Sample = 17, - Invariant = 18, - Restrict = 19, - Aliased = 20, - Volatile = 21, - Constant = 22, - Coherent = 23, - NonWritable = 24, - NonReadable = 25, - Uniform = 26, - SaturatedConversion = 28, - Stream = 29, - Location = 30, - Component = 31, - Index = 32, - Binding = 33, - DescriptorSet = 34, - Offset = 35, - XfbBuffer = 36, - XfbStride = 37, - FuncParamAttr = 38, - FPRoundingMode = 39, - FPFastMathMode = 40, - LinkageAttributes = 41, - NoContraction = 42, - InputAttachmentIndex = 43, - Alignment = 44, -}; +typedef enum SpvDecoration_ { + SpvDecorationRelaxedPrecision = 0, + SpvDecorationSpecId = 1, + SpvDecorationBlock = 2, + SpvDecorationBufferBlock = 3, + SpvDecorationRowMajor = 4, + SpvDecorationColMajor = 5, + 
SpvDecorationArrayStride = 6, + SpvDecorationMatrixStride = 7, + SpvDecorationGLSLShared = 8, + SpvDecorationGLSLPacked = 9, + SpvDecorationCPacked = 10, + SpvDecorationBuiltIn = 11, + SpvDecorationNoPerspective = 13, + SpvDecorationFlat = 14, + SpvDecorationPatch = 15, + SpvDecorationCentroid = 16, + SpvDecorationSample = 17, + SpvDecorationInvariant = 18, + SpvDecorationRestrict = 19, + SpvDecorationAliased = 20, + SpvDecorationVolatile = 21, + SpvDecorationConstant = 22, + SpvDecorationCoherent = 23, + SpvDecorationNonWritable = 24, + SpvDecorationNonReadable = 25, + SpvDecorationUniform = 26, + SpvDecorationSaturatedConversion = 28, + SpvDecorationStream = 29, + SpvDecorationLocation = 30, + SpvDecorationComponent = 31, + SpvDecorationIndex = 32, + SpvDecorationBinding = 33, + SpvDecorationDescriptorSet = 34, + SpvDecorationOffset = 35, + SpvDecorationXfbBuffer = 36, + SpvDecorationXfbStride = 37, + SpvDecorationFuncParamAttr = 38, + SpvDecorationFPRoundingMode = 39, + SpvDecorationFPFastMathMode = 40, + SpvDecorationLinkageAttributes = 41, + SpvDecorationNoContraction = 42, + SpvDecorationInputAttachmentIndex = 43, + SpvDecorationAlignment = 44, +} SpvDecoration; -enum class BuiltIn : unsigned { - Position = 0, - PointSize = 1, - ClipDistance = 3, - CullDistance = 4, - VertexId = 5, - InstanceId = 6, - PrimitiveId = 7, - InvocationId = 8, - Layer = 9, - ViewportIndex = 10, - TessLevelOuter = 11, - TessLevelInner = 12, - TessCoord = 13, - PatchVertices = 14, - FragCoord = 15, - PointCoord = 16, - FrontFacing = 17, - SampleId = 18, - SamplePosition = 19, - SampleMask = 20, - FragDepth = 22, - HelperInvocation = 23, - NumWorkgroups = 24, - WorkgroupSize = 25, - WorkgroupId = 26, - LocalInvocationId = 27, - GlobalInvocationId = 28, - LocalInvocationIndex = 29, - WorkDim = 30, - GlobalSize = 31, - EnqueuedWorkgroupSize = 32, - GlobalOffset = 33, - GlobalLinearId = 34, - SubgroupSize = 36, - SubgroupMaxSize = 37, - NumSubgroups = 38, - NumEnqueuedSubgroups = 39, - 
SubgroupId = 40, - SubgroupLocalInvocationId = 41, - VertexIndex = 42, - InstanceIndex = 43, -}; +typedef enum SpvBuiltIn_ { + SpvBuiltInPosition = 0, + SpvBuiltInPointSize = 1, + SpvBuiltInClipDistance = 3, + SpvBuiltInCullDistance = 4, + SpvBuiltInVertexId = 5, + SpvBuiltInInstanceId = 6, + SpvBuiltInPrimitiveId = 7, + SpvBuiltInInvocationId = 8, + SpvBuiltInLayer = 9, + SpvBuiltInViewportIndex = 10, + SpvBuiltInTessLevelOuter = 11, + SpvBuiltInTessLevelInner = 12, + SpvBuiltInTessCoord = 13, + SpvBuiltInPatchVertices = 14, + SpvBuiltInFragCoord = 15, + SpvBuiltInPointCoord = 16, + SpvBuiltInFrontFacing = 17, + SpvBuiltInSampleId = 18, + SpvBuiltInSamplePosition = 19, + SpvBuiltInSampleMask = 20, + SpvBuiltInFragDepth = 22, + SpvBuiltInHelperInvocation = 23, + SpvBuiltInNumWorkgroups = 24, + SpvBuiltInWorkgroupSize = 25, + SpvBuiltInWorkgroupId = 26, + SpvBuiltInLocalInvocationId = 27, + SpvBuiltInGlobalInvocationId = 28, + SpvBuiltInLocalInvocationIndex = 29, + SpvBuiltInWorkDim = 30, + SpvBuiltInGlobalSize = 31, + SpvBuiltInEnqueuedWorkgroupSize = 32, + SpvBuiltInGlobalOffset = 33, + SpvBuiltInGlobalLinearId = 34, + SpvBuiltInSubgroupSize = 36, + SpvBuiltInSubgroupMaxSize = 37, + SpvBuiltInNumSubgroups = 38, + SpvBuiltInNumEnqueuedSubgroups = 39, + SpvBuiltInSubgroupId = 40, + SpvBuiltInSubgroupLocalInvocationId = 41, + SpvBuiltInVertexIndex = 42, + SpvBuiltInInstanceIndex = 43, +} SpvBuiltIn; -enum class SelectionControlShift : unsigned { - Flatten = 0, - DontFlatten = 1, -}; +typedef enum SpvSelectionControlShift_ { + SpvSelectionControlFlattenShift = 0, + SpvSelectionControlDontFlattenShift = 1, +} SpvSelectionControlShift; -enum class SelectionControlMask : unsigned { - MaskNone = 0, - Flatten = 0x00000001, - DontFlatten = 0x00000002, -}; +typedef enum SpvSelectionControlMask_ { + SpvSelectionControlMaskNone = 0, + SpvSelectionControlFlattenMask = 0x00000001, + SpvSelectionControlDontFlattenMask = 0x00000002, +} SpvSelectionControlMask; -enum class 
LoopControlShift : unsigned { - Unroll = 0, - DontUnroll = 1, -}; +typedef enum SpvLoopControlShift_ { + SpvLoopControlUnrollShift = 0, + SpvLoopControlDontUnrollShift = 1, +} SpvLoopControlShift; -enum class LoopControlMask : unsigned { - MaskNone = 0, - Unroll = 0x00000001, - DontUnroll = 0x00000002, -}; +typedef enum SpvLoopControlMask_ { + SpvLoopControlMaskNone = 0, + SpvLoopControlUnrollMask = 0x00000001, + SpvLoopControlDontUnrollMask = 0x00000002, +} SpvLoopControlMask; -enum class FunctionControlShift : unsigned { - Inline = 0, - DontInline = 1, - Pure = 2, - Const = 3, -}; +typedef enum SpvFunctionControlShift_ { + SpvFunctionControlInlineShift = 0, + SpvFunctionControlDontInlineShift = 1, + SpvFunctionControlPureShift = 2, + SpvFunctionControlConstShift = 3, +} SpvFunctionControlShift; -enum class FunctionControlMask : unsigned { - MaskNone = 0, - Inline = 0x00000001, - DontInline = 0x00000002, - Pure = 0x00000004, - Const = 0x00000008, -}; +typedef enum SpvFunctionControlMask_ { + SpvFunctionControlMaskNone = 0, + SpvFunctionControlInlineMask = 0x00000001, + SpvFunctionControlDontInlineMask = 0x00000002, + SpvFunctionControlPureMask = 0x00000004, + SpvFunctionControlConstMask = 0x00000008, +} SpvFunctionControlMask; -enum class MemorySemanticsShift : unsigned { - Acquire = 1, - Release = 2, - AcquireRelease = 3, - SequentiallyConsistent = 4, - UniformMemory = 6, - SubgroupMemory = 7, - WorkgroupMemory = 8, - CrossWorkgroupMemory = 9, - AtomicCounterMemory = 10, - ImageMemory = 11, -}; +typedef enum SpvMemorySemanticsShift_ { + SpvMemorySemanticsAcquireShift = 1, + SpvMemorySemanticsReleaseShift = 2, + SpvMemorySemanticsAcquireReleaseShift = 3, + SpvMemorySemanticsSequentiallyConsistentShift = 4, + SpvMemorySemanticsUniformMemoryShift = 6, + SpvMemorySemanticsSubgroupMemoryShift = 7, + SpvMemorySemanticsWorkgroupMemoryShift = 8, + SpvMemorySemanticsCrossWorkgroupMemoryShift = 9, + SpvMemorySemanticsAtomicCounterMemoryShift = 10, + 
SpvMemorySemanticsImageMemoryShift = 11, +} SpvMemorySemanticsShift; -enum class MemorySemanticsMask : unsigned { - MaskNone = 0, - Acquire = 0x00000002, - Release = 0x00000004, - AcquireRelease = 0x00000008, - SequentiallyConsistent = 0x00000010, - UniformMemory = 0x00000040, - SubgroupMemory = 0x00000080, - WorkgroupMemory = 0x00000100, - CrossWorkgroupMemory = 0x00000200, - AtomicCounterMemory = 0x00000400, - ImageMemory = 0x00000800, -}; +typedef enum SpvMemorySemanticsMask_ { + SpvMemorySemanticsMaskNone = 0, + SpvMemorySemanticsAcquireMask = 0x00000002, + SpvMemorySemanticsReleaseMask = 0x00000004, + SpvMemorySemanticsAcquireReleaseMask = 0x00000008, + SpvMemorySemanticsSequentiallyConsistentMask = 0x00000010, + SpvMemorySemanticsUniformMemoryMask = 0x00000040, + SpvMemorySemanticsSubgroupMemoryMask = 0x00000080, + SpvMemorySemanticsWorkgroupMemoryMask = 0x00000100, + SpvMemorySemanticsCrossWorkgroupMemoryMask = 0x00000200, + SpvMemorySemanticsAtomicCounterMemoryMask = 0x00000400, + SpvMemorySemanticsImageMemoryMask = 0x00000800, +} SpvMemorySemanticsMask; -enum class MemoryAccessShift : unsigned { - Volatile = 0, - Aligned = 1, - Nontemporal = 2, -}; +typedef enum SpvMemoryAccessShift_ { + SpvMemoryAccessVolatileShift = 0, + SpvMemoryAccessAlignedShift = 1, + SpvMemoryAccessNontemporalShift = 2, +} SpvMemoryAccessShift; -enum class MemoryAccessMask : unsigned { - MaskNone = 0, - Volatile = 0x00000001, - Aligned = 0x00000002, - Nontemporal = 0x00000004, -}; +typedef enum SpvMemoryAccessMask_ { + SpvMemoryAccessMaskNone = 0, + SpvMemoryAccessVolatileMask = 0x00000001, + SpvMemoryAccessAlignedMask = 0x00000002, + SpvMemoryAccessNontemporalMask = 0x00000004, +} SpvMemoryAccessMask; -enum class Scope : unsigned { - CrossDevice = 0, - Device = 1, - Workgroup = 2, - Subgroup = 3, - Invocation = 4, -}; +typedef enum SpvScope_ { + SpvScopeCrossDevice = 0, + SpvScopeDevice = 1, + SpvScopeWorkgroup = 2, + SpvScopeSubgroup = 3, + SpvScopeInvocation = 4, +} SpvScope; 
-enum class GroupOperation : unsigned { - Reduce = 0, - InclusiveScan = 1, - ExclusiveScan = 2, -}; +typedef enum SpvGroupOperation_ { + SpvGroupOperationReduce = 0, + SpvGroupOperationInclusiveScan = 1, + SpvGroupOperationExclusiveScan = 2, +} SpvGroupOperation; -enum class KernelEnqueueFlags : unsigned { - NoWait = 0, - WaitKernel = 1, - WaitWorkGroup = 2, -}; +typedef enum SpvKernelEnqueueFlags_ { + SpvKernelEnqueueFlagsNoWait = 0, + SpvKernelEnqueueFlagsWaitKernel = 1, + SpvKernelEnqueueFlagsWaitWorkGroup = 2, +} SpvKernelEnqueueFlags; -enum class KernelProfilingInfoShift : unsigned { - CmdExecTime = 0, -}; +typedef enum SpvKernelProfilingInfoShift_ { + SpvKernelProfilingInfoCmdExecTimeShift = 0, +} SpvKernelProfilingInfoShift; -enum class KernelProfilingInfoMask : unsigned { - MaskNone = 0, - CmdExecTime = 0x00000001, -}; +typedef enum SpvKernelProfilingInfoMask_ { + SpvKernelProfilingInfoMaskNone = 0, + SpvKernelProfilingInfoCmdExecTimeMask = 0x00000001, +} SpvKernelProfilingInfoMask; -enum class Capability : unsigned { - Matrix = 0, - Shader = 1, - Geometry = 2, - Tessellation = 3, - Addresses = 4, - Linkage = 5, - Kernel = 6, - Vector16 = 7, - Float16Buffer = 8, - Float16 = 9, - Float64 = 10, - Int64 = 11, - Int64Atomics = 12, - ImageBasic = 13, - ImageReadWrite = 14, - ImageMipmap = 15, - Pipes = 17, - Groups = 18, - DeviceEnqueue = 19, - LiteralSampler = 20, - AtomicStorage = 21, - Int16 = 22, - TessellationPointSize = 23, - GeometryPointSize = 24, - ImageGatherExtended = 25, - StorageImageMultisample = 27, - UniformBufferArrayDynamicIndexing = 28, - SampledImageArrayDynamicIndexing = 29, - StorageBufferArrayDynamicIndexing = 30, - StorageImageArrayDynamicIndexing = 31, - ClipDistance = 32, - CullDistance = 33, - ImageCubeArray = 34, - SampleRateShading = 35, - ImageRect = 36, - SampledRect = 37, - GenericPointer = 38, - Int8 = 39, - InputAttachment = 40, - SparseResidency = 41, - MinLod = 42, - Sampled1D = 43, - Image1D = 44, - SampledCubeArray = 45, - 
SampledBuffer = 46, - ImageBuffer = 47, - ImageMSArray = 48, - StorageImageExtendedFormats = 49, - ImageQuery = 50, - DerivativeControl = 51, - InterpolationFunction = 52, - TransformFeedback = 53, - GeometryStreams = 54, - StorageImageReadWithoutFormat = 55, - StorageImageWriteWithoutFormat = 56, -}; +typedef enum SpvCapability_ { + SpvCapabilityMatrix = 0, + SpvCapabilityShader = 1, + SpvCapabilityGeometry = 2, + SpvCapabilityTessellation = 3, + SpvCapabilityAddresses = 4, + SpvCapabilityLinkage = 5, + SpvCapabilityKernel = 6, + SpvCapabilityVector16 = 7, + SpvCapabilityFloat16Buffer = 8, + SpvCapabilityFloat16 = 9, + SpvCapabilityFloat64 = 10, + SpvCapabilityInt64 = 11, + SpvCapabilityInt64Atomics = 12, + SpvCapabilityImageBasic = 13, + SpvCapabilityImageReadWrite = 14, + SpvCapabilityImageMipmap = 15, + SpvCapabilityPipes = 17, + SpvCapabilityGroups = 18, + SpvCapabilityDeviceEnqueue = 19, + SpvCapabilityLiteralSampler = 20, + SpvCapabilityAtomicStorage = 21, + SpvCapabilityInt16 = 22, + SpvCapabilityTessellationPointSize = 23, + SpvCapabilityGeometryPointSize = 24, + SpvCapabilityImageGatherExtended = 25, + SpvCapabilityStorageImageMultisample = 27, + SpvCapabilityUniformBufferArrayDynamicIndexing = 28, + SpvCapabilitySampledImageArrayDynamicIndexing = 29, + SpvCapabilityStorageBufferArrayDynamicIndexing = 30, + SpvCapabilityStorageImageArrayDynamicIndexing = 31, + SpvCapabilityClipDistance = 32, + SpvCapabilityCullDistance = 33, + SpvCapabilityImageCubeArray = 34, + SpvCapabilitySampleRateShading = 35, + SpvCapabilityImageRect = 36, + SpvCapabilitySampledRect = 37, + SpvCapabilityGenericPointer = 38, + SpvCapabilityInt8 = 39, + SpvCapabilityInputAttachment = 40, + SpvCapabilitySparseResidency = 41, + SpvCapabilityMinLod = 42, + SpvCapabilitySampled1D = 43, + SpvCapabilityImage1D = 44, + SpvCapabilitySampledCubeArray = 45, + SpvCapabilitySampledBuffer = 46, + SpvCapabilityImageBuffer = 47, + SpvCapabilityImageMSArray = 48, + 
SpvCapabilityStorageImageExtendedFormats = 49, + SpvCapabilityImageQuery = 50, + SpvCapabilityDerivativeControl = 51, + SpvCapabilityInterpolationFunction = 52, + SpvCapabilityTransformFeedback = 53, + SpvCapabilityGeometryStreams = 54, + SpvCapabilityStorageImageReadWithoutFormat = 55, + SpvCapabilityStorageImageWriteWithoutFormat = 56, + SpvCapabilityMultiViewport = 57, +} SpvCapability; -enum class Op : unsigned { - OpNop = 0, - OpUndef = 1, - OpSourceContinued = 2, - OpSource = 3, - OpSourceExtension = 4, - OpName = 5, - OpMemberName = 6, - OpString = 7, - OpLine = 8, - OpExtension = 10, - OpExtInstImport = 11, - OpExtInst = 12, - OpMemoryModel = 14, - OpEntryPoint = 15, - OpExecutionMode = 16, - OpCapability = 17, - OpTypeVoid = 19, - OpTypeBool = 20, - OpTypeInt = 21, - OpTypeFloat = 22, - OpTypeVector = 23, - OpTypeMatrix = 24, - OpTypeImage = 25, - OpTypeSampler = 26, - OpTypeSampledImage = 27, - OpTypeArray = 28, - OpTypeRuntimeArray = 29, - OpTypeStruct = 30, - OpTypeOpaque = 31, - OpTypePointer = 32, - OpTypeFunction = 33, - OpTypeEvent = 34, - OpTypeDeviceEvent = 35, - OpTypeReserveId = 36, - OpTypeQueue = 37, - OpTypePipe = 38, - OpTypeForwardPointer = 39, - OpConstantTrue = 41, - OpConstantFalse = 42, - OpConstant = 43, - OpConstantComposite = 44, - OpConstantSampler = 45, - OpConstantNull = 46, - OpSpecConstantTrue = 48, - OpSpecConstantFalse = 49, - OpSpecConstant = 50, - OpSpecConstantComposite = 51, - OpSpecConstantOp = 52, - OpFunction = 54, - OpFunctionParameter = 55, - OpFunctionEnd = 56, - OpFunctionCall = 57, - OpVariable = 59, - OpImageTexelPointer = 60, - OpLoad = 61, - OpStore = 62, - OpCopyMemory = 63, - OpCopyMemorySized = 64, - OpAccessChain = 65, - OpInBoundsAccessChain = 66, - OpPtrAccessChain = 67, - OpArrayLength = 68, - OpGenericPtrMemSemantics = 69, - OpInBoundsPtrAccessChain = 70, - OpDecorate = 71, - OpMemberDecorate = 72, - OpDecorationGroup = 73, - OpGroupDecorate = 74, - OpGroupMemberDecorate = 75, - OpVectorExtractDynamic = 
77, - OpVectorInsertDynamic = 78, - OpVectorShuffle = 79, - OpCompositeConstruct = 80, - OpCompositeExtract = 81, - OpCompositeInsert = 82, - OpCopyObject = 83, - OpTranspose = 84, - OpSampledImage = 86, - OpImageSampleImplicitLod = 87, - OpImageSampleExplicitLod = 88, - OpImageSampleDrefImplicitLod = 89, - OpImageSampleDrefExplicitLod = 90, - OpImageSampleProjImplicitLod = 91, - OpImageSampleProjExplicitLod = 92, - OpImageSampleProjDrefImplicitLod = 93, - OpImageSampleProjDrefExplicitLod = 94, - OpImageFetch = 95, - OpImageGather = 96, - OpImageDrefGather = 97, - OpImageRead = 98, - OpImageWrite = 99, - OpImage = 100, - OpImageQueryFormat = 101, - OpImageQueryOrder = 102, - OpImageQuerySizeLod = 103, - OpImageQuerySize = 104, - OpImageQueryLod = 105, - OpImageQueryLevels = 106, - OpImageQuerySamples = 107, - OpConvertFToU = 109, - OpConvertFToS = 110, - OpConvertSToF = 111, - OpConvertUToF = 112, - OpUConvert = 113, - OpSConvert = 114, - OpFConvert = 115, - OpQuantizeToF16 = 116, - OpConvertPtrToU = 117, - OpSatConvertSToU = 118, - OpSatConvertUToS = 119, - OpConvertUToPtr = 120, - OpPtrCastToGeneric = 121, - OpGenericCastToPtr = 122, - OpGenericCastToPtrExplicit = 123, - OpBitcast = 124, - OpSNegate = 126, - OpFNegate = 127, - OpIAdd = 128, - OpFAdd = 129, - OpISub = 130, - OpFSub = 131, - OpIMul = 132, - OpFMul = 133, - OpUDiv = 134, - OpSDiv = 135, - OpFDiv = 136, - OpUMod = 137, - OpSRem = 138, - OpSMod = 139, - OpFRem = 140, - OpFMod = 141, - OpVectorTimesScalar = 142, - OpMatrixTimesScalar = 143, - OpVectorTimesMatrix = 144, - OpMatrixTimesVector = 145, - OpMatrixTimesMatrix = 146, - OpOuterProduct = 147, - OpDot = 148, - OpIAddCarry = 149, - OpISubBorrow = 150, - OpUMulExtended = 151, - OpSMulExtended = 152, - OpAny = 154, - OpAll = 155, - OpIsNan = 156, - OpIsInf = 157, - OpIsFinite = 158, - OpIsNormal = 159, - OpSignBitSet = 160, - OpLessOrGreater = 161, - OpOrdered = 162, - OpUnordered = 163, - OpLogicalEqual = 164, - OpLogicalNotEqual = 165, - 
OpLogicalOr = 166, - OpLogicalAnd = 167, - OpLogicalNot = 168, - OpSelect = 169, - OpIEqual = 170, - OpINotEqual = 171, - OpUGreaterThan = 172, - OpSGreaterThan = 173, - OpUGreaterThanEqual = 174, - OpSGreaterThanEqual = 175, - OpULessThan = 176, - OpSLessThan = 177, - OpULessThanEqual = 178, - OpSLessThanEqual = 179, - OpFOrdEqual = 180, - OpFUnordEqual = 181, - OpFOrdNotEqual = 182, - OpFUnordNotEqual = 183, - OpFOrdLessThan = 184, - OpFUnordLessThan = 185, - OpFOrdGreaterThan = 186, - OpFUnordGreaterThan = 187, - OpFOrdLessThanEqual = 188, - OpFUnordLessThanEqual = 189, - OpFOrdGreaterThanEqual = 190, - OpFUnordGreaterThanEqual = 191, - OpShiftRightLogical = 194, - OpShiftRightArithmetic = 195, - OpShiftLeftLogical = 196, - OpBitwiseOr = 197, - OpBitwiseXor = 198, - OpBitwiseAnd = 199, - OpNot = 200, - OpBitFieldInsert = 201, - OpBitFieldSExtract = 202, - OpBitFieldUExtract = 203, - OpBitReverse = 204, - OpBitCount = 205, - OpDPdx = 207, - OpDPdy = 208, - OpFwidth = 209, - OpDPdxFine = 210, - OpDPdyFine = 211, - OpFwidthFine = 212, - OpDPdxCoarse = 213, - OpDPdyCoarse = 214, - OpFwidthCoarse = 215, - OpEmitVertex = 218, - OpEndPrimitive = 219, - OpEmitStreamVertex = 220, - OpEndStreamPrimitive = 221, - OpControlBarrier = 224, - OpMemoryBarrier = 225, - OpAtomicLoad = 227, - OpAtomicStore = 228, - OpAtomicExchange = 229, - OpAtomicCompareExchange = 230, - OpAtomicCompareExchangeWeak = 231, - OpAtomicIIncrement = 232, - OpAtomicIDecrement = 233, - OpAtomicIAdd = 234, - OpAtomicISub = 235, - OpAtomicSMin = 236, - OpAtomicUMin = 237, - OpAtomicSMax = 238, - OpAtomicUMax = 239, - OpAtomicAnd = 240, - OpAtomicOr = 241, - OpAtomicXor = 242, - OpPhi = 245, - OpLoopMerge = 246, - OpSelectionMerge = 247, - OpLabel = 248, - OpBranch = 249, - OpBranchConditional = 250, - OpSwitch = 251, - OpKill = 252, - OpReturn = 253, - OpReturnValue = 254, - OpUnreachable = 255, - OpLifetimeStart = 256, - OpLifetimeStop = 257, - OpGroupAsyncCopy = 259, - OpGroupWaitEvents = 260, - 
OpGroupAll = 261, - OpGroupAny = 262, - OpGroupBroadcast = 263, - OpGroupIAdd = 264, - OpGroupFAdd = 265, - OpGroupFMin = 266, - OpGroupUMin = 267, - OpGroupSMin = 268, - OpGroupFMax = 269, - OpGroupUMax = 270, - OpGroupSMax = 271, - OpReadPipe = 274, - OpWritePipe = 275, - OpReservedReadPipe = 276, - OpReservedWritePipe = 277, - OpReserveReadPipePackets = 278, - OpReserveWritePipePackets = 279, - OpCommitReadPipe = 280, - OpCommitWritePipe = 281, - OpIsValidReserveId = 282, - OpGetNumPipePackets = 283, - OpGetMaxPipePackets = 284, - OpGroupReserveReadPipePackets = 285, - OpGroupReserveWritePipePackets = 286, - OpGroupCommitReadPipe = 287, - OpGroupCommitWritePipe = 288, - OpEnqueueMarker = 291, - OpEnqueueKernel = 292, - OpGetKernelNDrangeSubGroupCount = 293, - OpGetKernelNDrangeMaxSubGroupSize = 294, - OpGetKernelWorkGroupSize = 295, - OpGetKernelPreferredWorkGroupSizeMultiple = 296, - OpRetainEvent = 297, - OpReleaseEvent = 298, - OpCreateUserEvent = 299, - OpIsValidEvent = 300, - OpSetUserEventStatus = 301, - OpCaptureEventProfilingInfo = 302, - OpGetDefaultQueue = 303, - OpBuildNDRange = 304, - OpImageSparseSampleImplicitLod = 305, - OpImageSparseSampleExplicitLod = 306, - OpImageSparseSampleDrefImplicitLod = 307, - OpImageSparseSampleDrefExplicitLod = 308, - OpImageSparseSampleProjImplicitLod = 309, - OpImageSparseSampleProjExplicitLod = 310, - OpImageSparseSampleProjDrefImplicitLod = 311, - OpImageSparseSampleProjDrefExplicitLod = 312, - OpImageSparseFetch = 313, - OpImageSparseGather = 314, - OpImageSparseDrefGather = 315, - OpImageSparseTexelsResident = 316, - OpNoLine = 317, - OpAtomicFlagTestAndSet = 318, - OpAtomicFlagClear = 319, -}; +typedef enum SpvOp_ { + SpvOpNop = 0, + SpvOpUndef = 1, + SpvOpSourceContinued = 2, + SpvOpSource = 3, + SpvOpSourceExtension = 4, + SpvOpName = 5, + SpvOpMemberName = 6, + SpvOpString = 7, + SpvOpLine = 8, + SpvOpExtension = 10, + SpvOpExtInstImport = 11, + SpvOpExtInst = 12, + SpvOpMemoryModel = 14, + SpvOpEntryPoint = 
15, + SpvOpExecutionMode = 16, + SpvOpCapability = 17, + SpvOpTypeVoid = 19, + SpvOpTypeBool = 20, + SpvOpTypeInt = 21, + SpvOpTypeFloat = 22, + SpvOpTypeVector = 23, + SpvOpTypeMatrix = 24, + SpvOpTypeImage = 25, + SpvOpTypeSampler = 26, + SpvOpTypeSampledImage = 27, + SpvOpTypeArray = 28, + SpvOpTypeRuntimeArray = 29, + SpvOpTypeStruct = 30, + SpvOpTypeOpaque = 31, + SpvOpTypePointer = 32, + SpvOpTypeFunction = 33, + SpvOpTypeEvent = 34, + SpvOpTypeDeviceEvent = 35, + SpvOpTypeReserveId = 36, + SpvOpTypeQueue = 37, + SpvOpTypePipe = 38, + SpvOpTypeForwardPointer = 39, + SpvOpConstantTrue = 41, + SpvOpConstantFalse = 42, + SpvOpConstant = 43, + SpvOpConstantComposite = 44, + SpvOpConstantSampler = 45, + SpvOpConstantNull = 46, + SpvOpSpecConstantTrue = 48, + SpvOpSpecConstantFalse = 49, + SpvOpSpecConstant = 50, + SpvOpSpecConstantComposite = 51, + SpvOpSpecConstantOp = 52, + SpvOpFunction = 54, + SpvOpFunctionParameter = 55, + SpvOpFunctionEnd = 56, + SpvOpFunctionCall = 57, + SpvOpVariable = 59, + SpvOpImageTexelPointer = 60, + SpvOpLoad = 61, + SpvOpStore = 62, + SpvOpCopyMemory = 63, + SpvOpCopyMemorySized = 64, + SpvOpAccessChain = 65, + SpvOpInBoundsAccessChain = 66, + SpvOpPtrAccessChain = 67, + SpvOpArrayLength = 68, + SpvOpGenericPtrMemSemantics = 69, + SpvOpInBoundsPtrAccessChain = 70, + SpvOpDecorate = 71, + SpvOpMemberDecorate = 72, + SpvOpDecorationGroup = 73, + SpvOpGroupDecorate = 74, + SpvOpGroupMemberDecorate = 75, + SpvOpVectorExtractDynamic = 77, + SpvOpVectorInsertDynamic = 78, + SpvOpVectorShuffle = 79, + SpvOpCompositeConstruct = 80, + SpvOpCompositeExtract = 81, + SpvOpCompositeInsert = 82, + SpvOpCopyObject = 83, + SpvOpTranspose = 84, + SpvOpSampledImage = 86, + SpvOpImageSampleImplicitLod = 87, + SpvOpImageSampleExplicitLod = 88, + SpvOpImageSampleDrefImplicitLod = 89, + SpvOpImageSampleDrefExplicitLod = 90, + SpvOpImageSampleProjImplicitLod = 91, + SpvOpImageSampleProjExplicitLod = 92, + SpvOpImageSampleProjDrefImplicitLod = 93, + 
SpvOpImageSampleProjDrefExplicitLod = 94, + SpvOpImageFetch = 95, + SpvOpImageGather = 96, + SpvOpImageDrefGather = 97, + SpvOpImageRead = 98, + SpvOpImageWrite = 99, + SpvOpImage = 100, + SpvOpImageQueryFormat = 101, + SpvOpImageQueryOrder = 102, + SpvOpImageQuerySizeLod = 103, + SpvOpImageQuerySize = 104, + SpvOpImageQueryLod = 105, + SpvOpImageQueryLevels = 106, + SpvOpImageQuerySamples = 107, + SpvOpConvertFToU = 109, + SpvOpConvertFToS = 110, + SpvOpConvertSToF = 111, + SpvOpConvertUToF = 112, + SpvOpUConvert = 113, + SpvOpSConvert = 114, + SpvOpFConvert = 115, + SpvOpQuantizeToF16 = 116, + SpvOpConvertPtrToU = 117, + SpvOpSatConvertSToU = 118, + SpvOpSatConvertUToS = 119, + SpvOpConvertUToPtr = 120, + SpvOpPtrCastToGeneric = 121, + SpvOpGenericCastToPtr = 122, + SpvOpGenericCastToPtrExplicit = 123, + SpvOpBitcast = 124, + SpvOpSNegate = 126, + SpvOpFNegate = 127, + SpvOpIAdd = 128, + SpvOpFAdd = 129, + SpvOpISub = 130, + SpvOpFSub = 131, + SpvOpIMul = 132, + SpvOpFMul = 133, + SpvOpUDiv = 134, + SpvOpSDiv = 135, + SpvOpFDiv = 136, + SpvOpUMod = 137, + SpvOpSRem = 138, + SpvOpSMod = 139, + SpvOpFRem = 140, + SpvOpFMod = 141, + SpvOpVectorTimesScalar = 142, + SpvOpMatrixTimesScalar = 143, + SpvOpVectorTimesMatrix = 144, + SpvOpMatrixTimesVector = 145, + SpvOpMatrixTimesMatrix = 146, + SpvOpOuterProduct = 147, + SpvOpDot = 148, + SpvOpIAddCarry = 149, + SpvOpISubBorrow = 150, + SpvOpUMulExtended = 151, + SpvOpSMulExtended = 152, + SpvOpAny = 154, + SpvOpAll = 155, + SpvOpIsNan = 156, + SpvOpIsInf = 157, + SpvOpIsFinite = 158, + SpvOpIsNormal = 159, + SpvOpSignBitSet = 160, + SpvOpLessOrGreater = 161, + SpvOpOrdered = 162, + SpvOpUnordered = 163, + SpvOpLogicalEqual = 164, + SpvOpLogicalNotEqual = 165, + SpvOpLogicalOr = 166, + SpvOpLogicalAnd = 167, + SpvOpLogicalNot = 168, + SpvOpSelect = 169, + SpvOpIEqual = 170, + SpvOpINotEqual = 171, + SpvOpUGreaterThan = 172, + SpvOpSGreaterThan = 173, + SpvOpUGreaterThanEqual = 174, + SpvOpSGreaterThanEqual = 175, + 
SpvOpULessThan = 176, + SpvOpSLessThan = 177, + SpvOpULessThanEqual = 178, + SpvOpSLessThanEqual = 179, + SpvOpFOrdEqual = 180, + SpvOpFUnordEqual = 181, + SpvOpFOrdNotEqual = 182, + SpvOpFUnordNotEqual = 183, + SpvOpFOrdLessThan = 184, + SpvOpFUnordLessThan = 185, + SpvOpFOrdGreaterThan = 186, + SpvOpFUnordGreaterThan = 187, + SpvOpFOrdLessThanEqual = 188, + SpvOpFUnordLessThanEqual = 189, + SpvOpFOrdGreaterThanEqual = 190, + SpvOpFUnordGreaterThanEqual = 191, + SpvOpShiftRightLogical = 194, + SpvOpShiftRightArithmetic = 195, + SpvOpShiftLeftLogical = 196, + SpvOpBitwiseOr = 197, + SpvOpBitwiseXor = 198, + SpvOpBitwiseAnd = 199, + SpvOpNot = 200, + SpvOpBitFieldInsert = 201, + SpvOpBitFieldSExtract = 202, + SpvOpBitFieldUExtract = 203, + SpvOpBitReverse = 204, + SpvOpBitCount = 205, + SpvOpDPdx = 207, + SpvOpDPdy = 208, + SpvOpFwidth = 209, + SpvOpDPdxFine = 210, + SpvOpDPdyFine = 211, + SpvOpFwidthFine = 212, + SpvOpDPdxCoarse = 213, + SpvOpDPdyCoarse = 214, + SpvOpFwidthCoarse = 215, + SpvOpEmitVertex = 218, + SpvOpEndPrimitive = 219, + SpvOpEmitStreamVertex = 220, + SpvOpEndStreamPrimitive = 221, + SpvOpControlBarrier = 224, + SpvOpMemoryBarrier = 225, + SpvOpAtomicLoad = 227, + SpvOpAtomicStore = 228, + SpvOpAtomicExchange = 229, + SpvOpAtomicCompareExchange = 230, + SpvOpAtomicCompareExchangeWeak = 231, + SpvOpAtomicIIncrement = 232, + SpvOpAtomicIDecrement = 233, + SpvOpAtomicIAdd = 234, + SpvOpAtomicISub = 235, + SpvOpAtomicSMin = 236, + SpvOpAtomicUMin = 237, + SpvOpAtomicSMax = 238, + SpvOpAtomicUMax = 239, + SpvOpAtomicAnd = 240, + SpvOpAtomicOr = 241, + SpvOpAtomicXor = 242, + SpvOpPhi = 245, + SpvOpLoopMerge = 246, + SpvOpSelectionMerge = 247, + SpvOpLabel = 248, + SpvOpBranch = 249, + SpvOpBranchConditional = 250, + SpvOpSwitch = 251, + SpvOpKill = 252, + SpvOpReturn = 253, + SpvOpReturnValue = 254, + SpvOpUnreachable = 255, + SpvOpLifetimeStart = 256, + SpvOpLifetimeStop = 257, + SpvOpGroupAsyncCopy = 259, + SpvOpGroupWaitEvents = 260, + 
SpvOpGroupAll = 261, + SpvOpGroupAny = 262, + SpvOpGroupBroadcast = 263, + SpvOpGroupIAdd = 264, + SpvOpGroupFAdd = 265, + SpvOpGroupFMin = 266, + SpvOpGroupUMin = 267, + SpvOpGroupSMin = 268, + SpvOpGroupFMax = 269, + SpvOpGroupUMax = 270, + SpvOpGroupSMax = 271, + SpvOpReadPipe = 274, + SpvOpWritePipe = 275, + SpvOpReservedReadPipe = 276, + SpvOpReservedWritePipe = 277, + SpvOpReserveReadPipePackets = 278, + SpvOpReserveWritePipePackets = 279, + SpvOpCommitReadPipe = 280, + SpvOpCommitWritePipe = 281, + SpvOpIsValidReserveId = 282, + SpvOpGetNumPipePackets = 283, + SpvOpGetMaxPipePackets = 284, + SpvOpGroupReserveReadPipePackets = 285, + SpvOpGroupReserveWritePipePackets = 286, + SpvOpGroupCommitReadPipe = 287, + SpvOpGroupCommitWritePipe = 288, + SpvOpEnqueueMarker = 291, + SpvOpEnqueueKernel = 292, + SpvOpGetKernelNDrangeSubGroupCount = 293, + SpvOpGetKernelNDrangeMaxSubGroupSize = 294, + SpvOpGetKernelWorkGroupSize = 295, + SpvOpGetKernelPreferredWorkGroupSizeMultiple = 296, + SpvOpRetainEvent = 297, + SpvOpReleaseEvent = 298, + SpvOpCreateUserEvent = 299, + SpvOpIsValidEvent = 300, + SpvOpSetUserEventStatus = 301, + SpvOpCaptureEventProfilingInfo = 302, + SpvOpGetDefaultQueue = 303, + SpvOpBuildNDRange = 304, + SpvOpImageSparseSampleImplicitLod = 305, + SpvOpImageSparseSampleExplicitLod = 306, + SpvOpImageSparseSampleDrefImplicitLod = 307, + SpvOpImageSparseSampleDrefExplicitLod = 308, + SpvOpImageSparseSampleProjImplicitLod = 309, + SpvOpImageSparseSampleProjExplicitLod = 310, + SpvOpImageSparseSampleProjDrefImplicitLod = 311, + SpvOpImageSparseSampleProjDrefExplicitLod = 312, + SpvOpImageSparseFetch = 313, + SpvOpImageSparseGather = 314, + SpvOpImageSparseDrefGather = 315, + SpvOpImageSparseTexelsResident = 316, + SpvOpNoLine = 317, + SpvOpAtomicFlagTestAndSet = 318, + SpvOpAtomicFlagClear = 319, + SpvOpImageSparseRead = 320, +} SpvOp; -// Overload operator| for mask bit combining +#endif // #ifndef spirv_H -inline ImageOperandsMask 
operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | unsigned(b)); } -inline FPFastMathModeMask operator|(FPFastMathModeMask a, FPFastMathModeMask b) { return FPFastMathModeMask(unsigned(a) | unsigned(b)); } -inline SelectionControlMask operator|(SelectionControlMask a, SelectionControlMask b) { return SelectionControlMask(unsigned(a) | unsigned(b)); } -inline LoopControlMask operator|(LoopControlMask a, LoopControlMask b) { return LoopControlMask(unsigned(a) | unsigned(b)); } -inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask b) { return FunctionControlMask(unsigned(a) | unsigned(b)); } -inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); } -inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); } -inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); } - -} // end namespace spv - -#endif // #ifndef spirv_H11 diff --git a/third_party/spirv/spirv.hpp11 b/third_party/spirv/spirv.hpp11 new file mode 100644 index 000000000..03faaac38 --- /dev/null +++ b/third_party/spirv/spirv.hpp11 @@ -0,0 +1,880 @@ +// Copyright (c) 2014-2016 The Khronos Group Inc. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and/or associated documentation files (the "Materials"), +// to deal in the Materials without restriction, including without limitation +// the rights to use, copy, modify, merge, publish, distribute, sublicense, +// and/or sell copies of the Materials, and to permit persons to whom the +// Materials are furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Materials. +// +// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS +// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND +// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ +// +// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS +// IN THE MATERIALS. + +// This header is automatically generated by the same tool that creates +// the Binary Section of the SPIR-V specification. 
+ +// Enumeration tokens for SPIR-V, in various styles: +// C, C++, C++11, JSON, Lua, Python +// +// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL +// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL +// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL +// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL +// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] +// +// Some tokens act like mask values, which can be OR'd together, +// while others are mutually exclusive. The mask-like ones have +// "Mask" in their name, and a parallel enum that has the shift +// amount (1 << x) for each corresponding enumerant. + +#ifndef spirv_HPP +#define spirv_HPP + +namespace spv { + +typedef unsigned int Id; + +#define SPV_VERSION 0x10000 +#define SPV_REVISION 3 + +static const unsigned int MagicNumber = 0x07230203; +static const unsigned int Version = 0x00010000; +static const unsigned int Revision = 3; +static const unsigned int OpCodeMask = 0xffff; +static const unsigned int WordCountShift = 16; + +enum class SourceLanguage : unsigned { + Unknown = 0, + ESSL = 1, + GLSL = 2, + OpenCL_C = 3, + OpenCL_CPP = 4, +}; + +enum class ExecutionModel : unsigned { + Vertex = 0, + TessellationControl = 1, + TessellationEvaluation = 2, + Geometry = 3, + Fragment = 4, + GLCompute = 5, + Kernel = 6, +}; + +enum class AddressingModel : unsigned { + Logical = 0, + Physical32 = 1, + Physical64 = 2, +}; + +enum class MemoryModel : unsigned { + Simple = 0, + GLSL450 = 1, + OpenCL = 2, +}; + +enum class ExecutionMode : unsigned { + Invocations = 0, + SpacingEqual = 1, + SpacingFractionalEven = 2, + SpacingFractionalOdd = 3, + VertexOrderCw = 4, + VertexOrderCcw = 5, + PixelCenterInteger = 6, + OriginUpperLeft = 7, + OriginLowerLeft = 8, + EarlyFragmentTests = 9, + PointMode = 10, + Xfb = 11, + DepthReplacing = 12, + DepthGreater = 14, + DepthLess = 15, + DepthUnchanged = 16, + LocalSize = 
17, + LocalSizeHint = 18, + InputPoints = 19, + InputLines = 20, + InputLinesAdjacency = 21, + Triangles = 22, + InputTrianglesAdjacency = 23, + Quads = 24, + Isolines = 25, + OutputVertices = 26, + OutputPoints = 27, + OutputLineStrip = 28, + OutputTriangleStrip = 29, + VecTypeHint = 30, + ContractionOff = 31, +}; + +enum class StorageClass : unsigned { + UniformConstant = 0, + Input = 1, + Uniform = 2, + Output = 3, + Workgroup = 4, + CrossWorkgroup = 5, + Private = 6, + Function = 7, + Generic = 8, + PushConstant = 9, + AtomicCounter = 10, + Image = 11, +}; + +enum class Dim : unsigned { + Dim1D = 0, + Dim2D = 1, + Dim3D = 2, + Cube = 3, + Rect = 4, + Buffer = 5, + SubpassData = 6, +}; + +enum class SamplerAddressingMode : unsigned { + None = 0, + ClampToEdge = 1, + Clamp = 2, + Repeat = 3, + RepeatMirrored = 4, +}; + +enum class SamplerFilterMode : unsigned { + Nearest = 0, + Linear = 1, +}; + +enum class ImageFormat : unsigned { + Unknown = 0, + Rgba32f = 1, + Rgba16f = 2, + R32f = 3, + Rgba8 = 4, + Rgba8Snorm = 5, + Rg32f = 6, + Rg16f = 7, + R11fG11fB10f = 8, + R16f = 9, + Rgba16 = 10, + Rgb10A2 = 11, + Rg16 = 12, + Rg8 = 13, + R16 = 14, + R8 = 15, + Rgba16Snorm = 16, + Rg16Snorm = 17, + Rg8Snorm = 18, + R16Snorm = 19, + R8Snorm = 20, + Rgba32i = 21, + Rgba16i = 22, + Rgba8i = 23, + R32i = 24, + Rg32i = 25, + Rg16i = 26, + Rg8i = 27, + R16i = 28, + R8i = 29, + Rgba32ui = 30, + Rgba16ui = 31, + Rgba8ui = 32, + R32ui = 33, + Rgb10a2ui = 34, + Rg32ui = 35, + Rg16ui = 36, + Rg8ui = 37, + R16ui = 38, + R8ui = 39, +}; + +enum class ImageChannelOrder : unsigned { + R = 0, + A = 1, + RG = 2, + RA = 3, + RGB = 4, + RGBA = 5, + BGRA = 6, + ARGB = 7, + Intensity = 8, + Luminance = 9, + Rx = 10, + RGx = 11, + RGBx = 12, + Depth = 13, + DepthStencil = 14, + sRGB = 15, + sRGBx = 16, + sRGBA = 17, + sBGRA = 18, +}; + +enum class ImageChannelDataType : unsigned { + SnormInt8 = 0, + SnormInt16 = 1, + UnormInt8 = 2, + UnormInt16 = 3, + UnormShort565 = 4, + UnormShort555 = 5, + 
UnormInt101010 = 6, + SignedInt8 = 7, + SignedInt16 = 8, + SignedInt32 = 9, + UnsignedInt8 = 10, + UnsignedInt16 = 11, + UnsignedInt32 = 12, + HalfFloat = 13, + Float = 14, + UnormInt24 = 15, + UnormInt101010_2 = 16, +}; + +enum class ImageOperandsShift : unsigned { + Bias = 0, + Lod = 1, + Grad = 2, + ConstOffset = 3, + Offset = 4, + ConstOffsets = 5, + Sample = 6, + MinLod = 7, +}; + +enum class ImageOperandsMask : unsigned { + MaskNone = 0, + Bias = 0x00000001, + Lod = 0x00000002, + Grad = 0x00000004, + ConstOffset = 0x00000008, + Offset = 0x00000010, + ConstOffsets = 0x00000020, + Sample = 0x00000040, + MinLod = 0x00000080, +}; + +enum class FPFastMathModeShift : unsigned { + NotNaN = 0, + NotInf = 1, + NSZ = 2, + AllowRecip = 3, + Fast = 4, +}; + +enum class FPFastMathModeMask : unsigned { + MaskNone = 0, + NotNaN = 0x00000001, + NotInf = 0x00000002, + NSZ = 0x00000004, + AllowRecip = 0x00000008, + Fast = 0x00000010, +}; + +enum class FPRoundingMode : unsigned { + RTE = 0, + RTZ = 1, + RTP = 2, + RTN = 3, +}; + +enum class LinkageType : unsigned { + Export = 0, + Import = 1, +}; + +enum class AccessQualifier : unsigned { + ReadOnly = 0, + WriteOnly = 1, + ReadWrite = 2, +}; + +enum class FunctionParameterAttribute : unsigned { + Zext = 0, + Sext = 1, + ByVal = 2, + Sret = 3, + NoAlias = 4, + NoCapture = 5, + NoWrite = 6, + NoReadWrite = 7, +}; + +enum class Decoration : unsigned { + RelaxedPrecision = 0, + SpecId = 1, + Block = 2, + BufferBlock = 3, + RowMajor = 4, + ColMajor = 5, + ArrayStride = 6, + MatrixStride = 7, + GLSLShared = 8, + GLSLPacked = 9, + CPacked = 10, + BuiltIn = 11, + NoPerspective = 13, + Flat = 14, + Patch = 15, + Centroid = 16, + Sample = 17, + Invariant = 18, + Restrict = 19, + Aliased = 20, + Volatile = 21, + Constant = 22, + Coherent = 23, + NonWritable = 24, + NonReadable = 25, + Uniform = 26, + SaturatedConversion = 28, + Stream = 29, + Location = 30, + Component = 31, + Index = 32, + Binding = 33, + DescriptorSet = 34, + Offset = 
35, + XfbBuffer = 36, + XfbStride = 37, + FuncParamAttr = 38, + FPRoundingMode = 39, + FPFastMathMode = 40, + LinkageAttributes = 41, + NoContraction = 42, + InputAttachmentIndex = 43, + Alignment = 44, +}; + +enum class BuiltIn : unsigned { + Position = 0, + PointSize = 1, + ClipDistance = 3, + CullDistance = 4, + VertexId = 5, + InstanceId = 6, + PrimitiveId = 7, + InvocationId = 8, + Layer = 9, + ViewportIndex = 10, + TessLevelOuter = 11, + TessLevelInner = 12, + TessCoord = 13, + PatchVertices = 14, + FragCoord = 15, + PointCoord = 16, + FrontFacing = 17, + SampleId = 18, + SamplePosition = 19, + SampleMask = 20, + FragDepth = 22, + HelperInvocation = 23, + NumWorkgroups = 24, + WorkgroupSize = 25, + WorkgroupId = 26, + LocalInvocationId = 27, + GlobalInvocationId = 28, + LocalInvocationIndex = 29, + WorkDim = 30, + GlobalSize = 31, + EnqueuedWorkgroupSize = 32, + GlobalOffset = 33, + GlobalLinearId = 34, + SubgroupSize = 36, + SubgroupMaxSize = 37, + NumSubgroups = 38, + NumEnqueuedSubgroups = 39, + SubgroupId = 40, + SubgroupLocalInvocationId = 41, + VertexIndex = 42, + InstanceIndex = 43, +}; + +enum class SelectionControlShift : unsigned { + Flatten = 0, + DontFlatten = 1, +}; + +enum class SelectionControlMask : unsigned { + MaskNone = 0, + Flatten = 0x00000001, + DontFlatten = 0x00000002, +}; + +enum class LoopControlShift : unsigned { + Unroll = 0, + DontUnroll = 1, +}; + +enum class LoopControlMask : unsigned { + MaskNone = 0, + Unroll = 0x00000001, + DontUnroll = 0x00000002, +}; + +enum class FunctionControlShift : unsigned { + Inline = 0, + DontInline = 1, + Pure = 2, + Const = 3, +}; + +enum class FunctionControlMask : unsigned { + MaskNone = 0, + Inline = 0x00000001, + DontInline = 0x00000002, + Pure = 0x00000004, + Const = 0x00000008, +}; + +enum class MemorySemanticsShift : unsigned { + Acquire = 1, + Release = 2, + AcquireRelease = 3, + SequentiallyConsistent = 4, + UniformMemory = 6, + SubgroupMemory = 7, + WorkgroupMemory = 8, + 
CrossWorkgroupMemory = 9, + AtomicCounterMemory = 10, + ImageMemory = 11, +}; + +enum class MemorySemanticsMask : unsigned { + MaskNone = 0, + Acquire = 0x00000002, + Release = 0x00000004, + AcquireRelease = 0x00000008, + SequentiallyConsistent = 0x00000010, + UniformMemory = 0x00000040, + SubgroupMemory = 0x00000080, + WorkgroupMemory = 0x00000100, + CrossWorkgroupMemory = 0x00000200, + AtomicCounterMemory = 0x00000400, + ImageMemory = 0x00000800, +}; + +enum class MemoryAccessShift : unsigned { + Volatile = 0, + Aligned = 1, + Nontemporal = 2, +}; + +enum class MemoryAccessMask : unsigned { + MaskNone = 0, + Volatile = 0x00000001, + Aligned = 0x00000002, + Nontemporal = 0x00000004, +}; + +enum class Scope : unsigned { + CrossDevice = 0, + Device = 1, + Workgroup = 2, + Subgroup = 3, + Invocation = 4, +}; + +enum class GroupOperation : unsigned { + Reduce = 0, + InclusiveScan = 1, + ExclusiveScan = 2, +}; + +enum class KernelEnqueueFlags : unsigned { + NoWait = 0, + WaitKernel = 1, + WaitWorkGroup = 2, +}; + +enum class KernelProfilingInfoShift : unsigned { + CmdExecTime = 0, +}; + +enum class KernelProfilingInfoMask : unsigned { + MaskNone = 0, + CmdExecTime = 0x00000001, +}; + +enum class Capability : unsigned { + Matrix = 0, + Shader = 1, + Geometry = 2, + Tessellation = 3, + Addresses = 4, + Linkage = 5, + Kernel = 6, + Vector16 = 7, + Float16Buffer = 8, + Float16 = 9, + Float64 = 10, + Int64 = 11, + Int64Atomics = 12, + ImageBasic = 13, + ImageReadWrite = 14, + ImageMipmap = 15, + Pipes = 17, + Groups = 18, + DeviceEnqueue = 19, + LiteralSampler = 20, + AtomicStorage = 21, + Int16 = 22, + TessellationPointSize = 23, + GeometryPointSize = 24, + ImageGatherExtended = 25, + StorageImageMultisample = 27, + UniformBufferArrayDynamicIndexing = 28, + SampledImageArrayDynamicIndexing = 29, + StorageBufferArrayDynamicIndexing = 30, + StorageImageArrayDynamicIndexing = 31, + ClipDistance = 32, + CullDistance = 33, + ImageCubeArray = 34, + SampleRateShading = 35, + 
ImageRect = 36, + SampledRect = 37, + GenericPointer = 38, + Int8 = 39, + InputAttachment = 40, + SparseResidency = 41, + MinLod = 42, + Sampled1D = 43, + Image1D = 44, + SampledCubeArray = 45, + SampledBuffer = 46, + ImageBuffer = 47, + ImageMSArray = 48, + StorageImageExtendedFormats = 49, + ImageQuery = 50, + DerivativeControl = 51, + InterpolationFunction = 52, + TransformFeedback = 53, + GeometryStreams = 54, + StorageImageReadWithoutFormat = 55, + StorageImageWriteWithoutFormat = 56, + MultiViewport = 57, +}; + +enum class Op : unsigned { + OpNop = 0, + OpUndef = 1, + OpSourceContinued = 2, + OpSource = 3, + OpSourceExtension = 4, + OpName = 5, + OpMemberName = 6, + OpString = 7, + OpLine = 8, + OpExtension = 10, + OpExtInstImport = 11, + OpExtInst = 12, + OpMemoryModel = 14, + OpEntryPoint = 15, + OpExecutionMode = 16, + OpCapability = 17, + OpTypeVoid = 19, + OpTypeBool = 20, + OpTypeInt = 21, + OpTypeFloat = 22, + OpTypeVector = 23, + OpTypeMatrix = 24, + OpTypeImage = 25, + OpTypeSampler = 26, + OpTypeSampledImage = 27, + OpTypeArray = 28, + OpTypeRuntimeArray = 29, + OpTypeStruct = 30, + OpTypeOpaque = 31, + OpTypePointer = 32, + OpTypeFunction = 33, + OpTypeEvent = 34, + OpTypeDeviceEvent = 35, + OpTypeReserveId = 36, + OpTypeQueue = 37, + OpTypePipe = 38, + OpTypeForwardPointer = 39, + OpConstantTrue = 41, + OpConstantFalse = 42, + OpConstant = 43, + OpConstantComposite = 44, + OpConstantSampler = 45, + OpConstantNull = 46, + OpSpecConstantTrue = 48, + OpSpecConstantFalse = 49, + OpSpecConstant = 50, + OpSpecConstantComposite = 51, + OpSpecConstantOp = 52, + OpFunction = 54, + OpFunctionParameter = 55, + OpFunctionEnd = 56, + OpFunctionCall = 57, + OpVariable = 59, + OpImageTexelPointer = 60, + OpLoad = 61, + OpStore = 62, + OpCopyMemory = 63, + OpCopyMemorySized = 64, + OpAccessChain = 65, + OpInBoundsAccessChain = 66, + OpPtrAccessChain = 67, + OpArrayLength = 68, + OpGenericPtrMemSemantics = 69, + OpInBoundsPtrAccessChain = 70, + OpDecorate = 71, + 
OpMemberDecorate = 72, + OpDecorationGroup = 73, + OpGroupDecorate = 74, + OpGroupMemberDecorate = 75, + OpVectorExtractDynamic = 77, + OpVectorInsertDynamic = 78, + OpVectorShuffle = 79, + OpCompositeConstruct = 80, + OpCompositeExtract = 81, + OpCompositeInsert = 82, + OpCopyObject = 83, + OpTranspose = 84, + OpSampledImage = 86, + OpImageSampleImplicitLod = 87, + OpImageSampleExplicitLod = 88, + OpImageSampleDrefImplicitLod = 89, + OpImageSampleDrefExplicitLod = 90, + OpImageSampleProjImplicitLod = 91, + OpImageSampleProjExplicitLod = 92, + OpImageSampleProjDrefImplicitLod = 93, + OpImageSampleProjDrefExplicitLod = 94, + OpImageFetch = 95, + OpImageGather = 96, + OpImageDrefGather = 97, + OpImageRead = 98, + OpImageWrite = 99, + OpImage = 100, + OpImageQueryFormat = 101, + OpImageQueryOrder = 102, + OpImageQuerySizeLod = 103, + OpImageQuerySize = 104, + OpImageQueryLod = 105, + OpImageQueryLevels = 106, + OpImageQuerySamples = 107, + OpConvertFToU = 109, + OpConvertFToS = 110, + OpConvertSToF = 111, + OpConvertUToF = 112, + OpUConvert = 113, + OpSConvert = 114, + OpFConvert = 115, + OpQuantizeToF16 = 116, + OpConvertPtrToU = 117, + OpSatConvertSToU = 118, + OpSatConvertUToS = 119, + OpConvertUToPtr = 120, + OpPtrCastToGeneric = 121, + OpGenericCastToPtr = 122, + OpGenericCastToPtrExplicit = 123, + OpBitcast = 124, + OpSNegate = 126, + OpFNegate = 127, + OpIAdd = 128, + OpFAdd = 129, + OpISub = 130, + OpFSub = 131, + OpIMul = 132, + OpFMul = 133, + OpUDiv = 134, + OpSDiv = 135, + OpFDiv = 136, + OpUMod = 137, + OpSRem = 138, + OpSMod = 139, + OpFRem = 140, + OpFMod = 141, + OpVectorTimesScalar = 142, + OpMatrixTimesScalar = 143, + OpVectorTimesMatrix = 144, + OpMatrixTimesVector = 145, + OpMatrixTimesMatrix = 146, + OpOuterProduct = 147, + OpDot = 148, + OpIAddCarry = 149, + OpISubBorrow = 150, + OpUMulExtended = 151, + OpSMulExtended = 152, + OpAny = 154, + OpAll = 155, + OpIsNan = 156, + OpIsInf = 157, + OpIsFinite = 158, + OpIsNormal = 159, + OpSignBitSet = 
160, + OpLessOrGreater = 161, + OpOrdered = 162, + OpUnordered = 163, + OpLogicalEqual = 164, + OpLogicalNotEqual = 165, + OpLogicalOr = 166, + OpLogicalAnd = 167, + OpLogicalNot = 168, + OpSelect = 169, + OpIEqual = 170, + OpINotEqual = 171, + OpUGreaterThan = 172, + OpSGreaterThan = 173, + OpUGreaterThanEqual = 174, + OpSGreaterThanEqual = 175, + OpULessThan = 176, + OpSLessThan = 177, + OpULessThanEqual = 178, + OpSLessThanEqual = 179, + OpFOrdEqual = 180, + OpFUnordEqual = 181, + OpFOrdNotEqual = 182, + OpFUnordNotEqual = 183, + OpFOrdLessThan = 184, + OpFUnordLessThan = 185, + OpFOrdGreaterThan = 186, + OpFUnordGreaterThan = 187, + OpFOrdLessThanEqual = 188, + OpFUnordLessThanEqual = 189, + OpFOrdGreaterThanEqual = 190, + OpFUnordGreaterThanEqual = 191, + OpShiftRightLogical = 194, + OpShiftRightArithmetic = 195, + OpShiftLeftLogical = 196, + OpBitwiseOr = 197, + OpBitwiseXor = 198, + OpBitwiseAnd = 199, + OpNot = 200, + OpBitFieldInsert = 201, + OpBitFieldSExtract = 202, + OpBitFieldUExtract = 203, + OpBitReverse = 204, + OpBitCount = 205, + OpDPdx = 207, + OpDPdy = 208, + OpFwidth = 209, + OpDPdxFine = 210, + OpDPdyFine = 211, + OpFwidthFine = 212, + OpDPdxCoarse = 213, + OpDPdyCoarse = 214, + OpFwidthCoarse = 215, + OpEmitVertex = 218, + OpEndPrimitive = 219, + OpEmitStreamVertex = 220, + OpEndStreamPrimitive = 221, + OpControlBarrier = 224, + OpMemoryBarrier = 225, + OpAtomicLoad = 227, + OpAtomicStore = 228, + OpAtomicExchange = 229, + OpAtomicCompareExchange = 230, + OpAtomicCompareExchangeWeak = 231, + OpAtomicIIncrement = 232, + OpAtomicIDecrement = 233, + OpAtomicIAdd = 234, + OpAtomicISub = 235, + OpAtomicSMin = 236, + OpAtomicUMin = 237, + OpAtomicSMax = 238, + OpAtomicUMax = 239, + OpAtomicAnd = 240, + OpAtomicOr = 241, + OpAtomicXor = 242, + OpPhi = 245, + OpLoopMerge = 246, + OpSelectionMerge = 247, + OpLabel = 248, + OpBranch = 249, + OpBranchConditional = 250, + OpSwitch = 251, + OpKill = 252, + OpReturn = 253, + OpReturnValue = 254, + 
OpUnreachable = 255, + OpLifetimeStart = 256, + OpLifetimeStop = 257, + OpGroupAsyncCopy = 259, + OpGroupWaitEvents = 260, + OpGroupAll = 261, + OpGroupAny = 262, + OpGroupBroadcast = 263, + OpGroupIAdd = 264, + OpGroupFAdd = 265, + OpGroupFMin = 266, + OpGroupUMin = 267, + OpGroupSMin = 268, + OpGroupFMax = 269, + OpGroupUMax = 270, + OpGroupSMax = 271, + OpReadPipe = 274, + OpWritePipe = 275, + OpReservedReadPipe = 276, + OpReservedWritePipe = 277, + OpReserveReadPipePackets = 278, + OpReserveWritePipePackets = 279, + OpCommitReadPipe = 280, + OpCommitWritePipe = 281, + OpIsValidReserveId = 282, + OpGetNumPipePackets = 283, + OpGetMaxPipePackets = 284, + OpGroupReserveReadPipePackets = 285, + OpGroupReserveWritePipePackets = 286, + OpGroupCommitReadPipe = 287, + OpGroupCommitWritePipe = 288, + OpEnqueueMarker = 291, + OpEnqueueKernel = 292, + OpGetKernelNDrangeSubGroupCount = 293, + OpGetKernelNDrangeMaxSubGroupSize = 294, + OpGetKernelWorkGroupSize = 295, + OpGetKernelPreferredWorkGroupSizeMultiple = 296, + OpRetainEvent = 297, + OpReleaseEvent = 298, + OpCreateUserEvent = 299, + OpIsValidEvent = 300, + OpSetUserEventStatus = 301, + OpCaptureEventProfilingInfo = 302, + OpGetDefaultQueue = 303, + OpBuildNDRange = 304, + OpImageSparseSampleImplicitLod = 305, + OpImageSparseSampleExplicitLod = 306, + OpImageSparseSampleDrefImplicitLod = 307, + OpImageSparseSampleDrefExplicitLod = 308, + OpImageSparseSampleProjImplicitLod = 309, + OpImageSparseSampleProjExplicitLod = 310, + OpImageSparseSampleProjDrefImplicitLod = 311, + OpImageSparseSampleProjDrefExplicitLod = 312, + OpImageSparseFetch = 313, + OpImageSparseGather = 314, + OpImageSparseDrefGather = 315, + OpImageSparseTexelsResident = 316, + OpNoLine = 317, + OpAtomicFlagTestAndSet = 318, + OpAtomicFlagClear = 319, + OpImageSparseRead = 320, +}; + +// Overload operator| for mask bit combining + +inline ImageOperandsMask operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | 
unsigned(b)); } +inline FPFastMathModeMask operator|(FPFastMathModeMask a, FPFastMathModeMask b) { return FPFastMathModeMask(unsigned(a) | unsigned(b)); } +inline SelectionControlMask operator|(SelectionControlMask a, SelectionControlMask b) { return SelectionControlMask(unsigned(a) | unsigned(b)); } +inline LoopControlMask operator|(LoopControlMask a, LoopControlMask b) { return LoopControlMask(unsigned(a) | unsigned(b)); } +inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask b) { return FunctionControlMask(unsigned(a) | unsigned(b)); } +inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); } +inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); } +inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); } + +} // end namespace spv + +#endif // #ifndef spirv_HPP + diff --git a/third_party/vulkan/icd-spv.h b/third_party/vulkan/icd-spv.h new file mode 100644 index 000000000..b6640a4b3 --- /dev/null +++ b/third_party/vulkan/icd-spv.h @@ -0,0 +1,42 @@ +/* + * + * Copyright (C) 2015-2016 Valve Corporation + * Copyright (C) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Author: Cody Northrop + * + */ + +#ifndef ICD_SPV_H +#define ICD_SPV_H + +#include + +#define ICD_SPV_MAGIC 0x07230203 +#define ICD_SPV_VERSION 99 + +struct icd_spv_header { + uint32_t magic; + uint32_t version; + uint32_t gen_magic; // Generator's magic number +}; + +#endif /* ICD_SPV_H */ diff --git a/third_party/vulkan/loader/cJSON.c b/third_party/vulkan/loader/cJSON.c new file mode 100644 index 000000000..097866032 --- /dev/null +++ b/third_party/vulkan/loader/cJSON.c @@ -0,0 +1,1358 @@ +/* + Copyright (c) 2009 Dave Gamble + Copyright (c) 2015-2016 The Khronos Group Inc. + Copyright (c) 2015-2016 Valve Corporation + Copyright (c) 2015-2016 LunarG, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ + +/* cJSON */ +/* JSON parser in C. */ + +#include +#include +#include +#include +#include +#include +#include +#include "cJSON.h" + +static const char *ep; + +const char *cJSON_GetErrorPtr(void) { return ep; } + +static void *(*cJSON_malloc)(size_t sz) = malloc; +static void (*cJSON_free)(void *ptr) = free; + +static char *cJSON_strdup(const char *str) { + size_t len; + char *copy; + + len = strlen(str) + 1; + if (!(copy = (char *)cJSON_malloc(len))) + return 0; + memcpy(copy, str, len); + return copy; +} + +void cJSON_InitHooks(cJSON_Hooks *hooks) { + if (!hooks) { /* Reset hooks */ + cJSON_malloc = malloc; + cJSON_free = free; + return; + } + + cJSON_malloc = (hooks->malloc_fn) ? hooks->malloc_fn : malloc; + cJSON_free = (hooks->free_fn) ? hooks->free_fn : free; +} + +/* Internal constructor. */ +static cJSON *cJSON_New_Item(void) { + cJSON *node = (cJSON *)cJSON_malloc(sizeof(cJSON)); + if (node) + memset(node, 0, sizeof(cJSON)); + return node; +} + +/* Delete a cJSON structure. */ +void cJSON_Delete(cJSON *c) { + cJSON *next; + while (c) { + next = c->next; + if (!(c->type & cJSON_IsReference) && c->child) + cJSON_Delete(c->child); + if (!(c->type & cJSON_IsReference) && c->valuestring) + cJSON_free(c->valuestring); + if (!(c->type & cJSON_StringIsConst) && c->string) + cJSON_free(c->string); + cJSON_free(c); + c = next; + } +} + +/* Parse the input text to generate a number, and populate the result into item. + */ +static const char *parse_number(cJSON *item, const char *num) { + double n = 0, sign = 1, scale = 0; + int subscale = 0, signsubscale = 1; + + if (*num == '-') + sign = -1, num++; /* Has sign? 
*/ + if (*num == '0') + num++; /* is zero */ + if (*num >= '1' && *num <= '9') + do + n = (n * 10.0) + (*num++ - '0'); + while (*num >= '0' && *num <= '9'); /* Number? */ + if (*num == '.' && num[1] >= '0' && num[1] <= '9') { + num++; + do + n = (n * 10.0) + (*num++ - '0'), scale--; + while (*num >= '0' && *num <= '9'); + } /* Fractional part? */ + if (*num == 'e' || *num == 'E') /* Exponent? */ + { + num++; + if (*num == '+') + num++; + else if (*num == '-') + signsubscale = -1, num++; /* With sign? */ + while (*num >= '0' && *num <= '9') + subscale = (subscale * 10) + (*num++ - '0'); /* Number? */ + } + + n = sign * n * + pow(10.0, (scale + subscale * signsubscale)); /* number = +/- + number.fraction * + 10^+/- exponent */ + + item->valuedouble = n; + item->valueint = (int)n; + item->type = cJSON_Number; + return num; +} + +static size_t pow2gt(size_t x) { + --x; + x |= x >> 1; + x |= x >> 2; + x |= x >> 4; + x |= x >> 8; + x |= x >> 16; + return x + 1; +} + +typedef struct { + char *buffer; + size_t length; + size_t offset; +} printbuffer; + +static char *ensure(printbuffer *p, size_t needed) { + char *newbuffer; + size_t newsize; + if (!p || !p->buffer) + return 0; + needed += p->offset; + if (needed <= p->length) + return p->buffer + p->offset; + + newsize = pow2gt(needed); + newbuffer = (char *)cJSON_malloc(newsize); + if (!newbuffer) { + cJSON_free(p->buffer); + p->length = 0, p->buffer = 0; + return 0; + } + if (newbuffer) + memcpy(newbuffer, p->buffer, p->length); + cJSON_free(p->buffer); + p->length = newsize; + p->buffer = newbuffer; + return newbuffer + p->offset; +} + +static size_t update(printbuffer *p) { + char *str; + if (!p || !p->buffer) + return 0; + str = p->buffer + p->offset; + return p->offset + strlen(str); +} + +/* Render the number nicely from the given item into a string. 
*/ +static char *print_number(cJSON *item, printbuffer *p) { + char *str = 0; + double d = item->valuedouble; + if (d == 0) { + if (p) + str = ensure(p, 2); + else + str = (char *)cJSON_malloc(2); /* special case for 0. */ + if (str) + strcpy(str, "0"); + } else if (fabs(((double)item->valueint) - d) <= DBL_EPSILON && + d <= INT_MAX && d >= INT_MIN) { + if (p) + str = ensure(p, 21); + else + str = (char *)cJSON_malloc( + 21); /* 2^64+1 can be represented in 21 chars. */ + if (str) + sprintf(str, "%d", item->valueint); + } else { + if (p) + str = ensure(p, 64); + else + str = (char *)cJSON_malloc(64); /* This is a nice tradeoff. */ + if (str) { + if (fabs(floor(d) - d) <= DBL_EPSILON && fabs(d) < 1.0e60) + sprintf(str, "%.0f", d); + else if (fabs(d) < 1.0e-6 || fabs(d) > 1.0e9) + sprintf(str, "%e", d); + else + sprintf(str, "%f", d); + } + } + return str; +} + +static unsigned parse_hex4(const char *str) { + unsigned h = 0; + if (*str >= '0' && *str <= '9') + h += (*str) - '0'; + else if (*str >= 'A' && *str <= 'F') + h += 10 + (*str) - 'A'; + else if (*str >= 'a' && *str <= 'f') + h += 10 + (*str) - 'a'; + else + return 0; + h = h << 4; + str++; + if (*str >= '0' && *str <= '9') + h += (*str) - '0'; + else if (*str >= 'A' && *str <= 'F') + h += 10 + (*str) - 'A'; + else if (*str >= 'a' && *str <= 'f') + h += 10 + (*str) - 'a'; + else + return 0; + h = h << 4; + str++; + if (*str >= '0' && *str <= '9') + h += (*str) - '0'; + else if (*str >= 'A' && *str <= 'F') + h += 10 + (*str) - 'A'; + else if (*str >= 'a' && *str <= 'f') + h += 10 + (*str) - 'a'; + else + return 0; + h = h << 4; + str++; + if (*str >= '0' && *str <= '9') + h += (*str) - '0'; + else if (*str >= 'A' && *str <= 'F') + h += 10 + (*str) - 'A'; + else if (*str >= 'a' && *str <= 'f') + h += 10 + (*str) - 'a'; + else + return 0; + return h; +} + +/* Parse the input text into an unescaped cstring, and populate item. 
*/ +static const unsigned char firstByteMark[7] = {0x00, 0x00, 0xC0, 0xE0, + 0xF0, 0xF8, 0xFC}; +static const char *parse_string(cJSON *item, const char *str) { + const char *ptr = str + 1; + char *ptr2; + char *out; + int len = 0; + unsigned uc, uc2; + if (*str != '\"') { + ep = str; + return 0; + } /* not a string! */ + + while (*ptr != '\"' && *ptr && ++len) + if (*ptr++ == '\\') + ptr++; /* Skip escaped quotes. */ + + out = (char *)cJSON_malloc( + len + 1); /* This is how long we need for the string, roughly. */ + if (!out) + return 0; + + ptr = str + 1; + ptr2 = out; + while (*ptr != '\"' && *ptr) { + if (*ptr != '\\') + *ptr2++ = *ptr++; + else { + ptr++; + switch (*ptr) { + case 'b': + *ptr2++ = '\b'; + break; + case 'f': + *ptr2++ = '\f'; + break; + case 'n': + *ptr2++ = '\n'; + break; + case 'r': + *ptr2++ = '\r'; + break; + case 't': + *ptr2++ = '\t'; + break; + case 'u': /* transcode utf16 to utf8. */ + uc = parse_hex4(ptr + 1); + ptr += 4; /* get the unicode char. */ + + if ((uc >= 0xDC00 && uc <= 0xDFFF) || uc == 0) + break; /* check for invalid. */ + + if (uc >= 0xD800 && + uc <= 0xDBFF) /* UTF16 surrogate pairs. */ + { + if (ptr[1] != '\\' || ptr[2] != 'u') + break; /* missing second-half of surrogate. */ + uc2 = parse_hex4(ptr + 3); + ptr += 6; + if (uc2 < 0xDC00 || uc2 > 0xDFFF) + break; /* invalid second-half of surrogate. 
*/ + uc = 0x10000 + (((uc & 0x3FF) << 10) | (uc2 & 0x3FF)); + } + + len = 4; + if (uc < 0x80) + len = 1; + else if (uc < 0x800) + len = 2; + else if (uc < 0x10000) + len = 3; + ptr2 += len; + + switch (len) { + case 4: + *--ptr2 = ((uc | 0x80) & 0xBF); + uc >>= 6; + case 3: + *--ptr2 = ((uc | 0x80) & 0xBF); + uc >>= 6; + case 2: + *--ptr2 = ((uc | 0x80) & 0xBF); + uc >>= 6; + case 1: + *--ptr2 = (uc | firstByteMark[len]); + } + ptr2 += len; + break; + default: + *ptr2++ = *ptr; + break; + } + ptr++; + } + } + *ptr2 = 0; + if (*ptr == '\"') + ptr++; + item->valuestring = out; + item->type = cJSON_String; + return ptr; +} + +/* Render the cstring provided to an escaped version that can be printed. */ +static char *print_string_ptr(const char *str, printbuffer *p) { + const char *ptr; + char *ptr2; + char *out; + size_t len = 0, flag = 0; + unsigned char token; + + for (ptr = str; *ptr; ptr++) + flag |= ((*ptr > 0 && *ptr < 32) || (*ptr == '\"') || (*ptr == '\\')) + ? 1 + : 0; + if (!flag) { + len = ptr - str; + if (p) + out = ensure(p, len + 3); + else + out = (char *)cJSON_malloc(len + 3); + if (!out) + return 0; + ptr2 = out; + *ptr2++ = '\"'; + strcpy(ptr2, str); + ptr2[len] = '\"'; + ptr2[len + 1] = 0; + return out; + } + + if (!str) { + if (p) + out = ensure(p, 3); + else + out = (char *)cJSON_malloc(3); + if (!out) + return 0; + strcpy(out, "\"\""); + return out; + } + ptr = str; + while ((token = *ptr) && ++len) { + if (strchr("\"\\\b\f\n\r\t", token)) + len++; + else if (token < 32) + len += 5; + ptr++; + } + + if (p) + out = ensure(p, len + 3); + else + out = (char *)cJSON_malloc(len + 3); + if (!out) + return 0; + + ptr2 = out; + ptr = str; + *ptr2++ = '\"'; + while (*ptr) { + if ((unsigned char)*ptr > 31 && *ptr != '\"' && *ptr != '\\') + *ptr2++ = *ptr++; + else { + *ptr2++ = '\\'; + switch (token = *ptr++) { + case '\\': + *ptr2++ = '\\'; + break; + case '\"': + *ptr2++ = '\"'; + break; + case '\b': + *ptr2++ = 'b'; + break; + case '\f': + *ptr2++ = 'f'; 
+ break; + case '\n': + *ptr2++ = 'n'; + break; + case '\r': + *ptr2++ = 'r'; + break; + case '\t': + *ptr2++ = 't'; + break; + default: + sprintf(ptr2, "u%04x", token); + ptr2 += 5; + break; /* escape and print */ + } + } + } + *ptr2++ = '\"'; + *ptr2++ = 0; + return out; +} +/* Invote print_string_ptr (which is useful) on an item. */ +static char *print_string(cJSON *item, printbuffer *p) { + return print_string_ptr(item->valuestring, p); +} + +/* Predeclare these prototypes. */ +static const char *parse_value(cJSON *item, const char *value); +static char *print_value(cJSON *item, int depth, int fmt, printbuffer *p); +static const char *parse_array(cJSON *item, const char *value); +static char *print_array(cJSON *item, int depth, int fmt, printbuffer *p); +static const char *parse_object(cJSON *item, const char *value); +static char *print_object(cJSON *item, int depth, int fmt, printbuffer *p); + +/* Utility to jump whitespace and cr/lf */ +static const char *skip(const char *in) { + while (in && *in && (unsigned char)*in <= 32) + in++; + return in; +} + +/* Parse an object - create a new root, and populate. */ +cJSON *cJSON_ParseWithOpts(const char *value, const char **return_parse_end, + int require_null_terminated) { + const char *end = 0; + cJSON *c = cJSON_New_Item(); + ep = 0; + if (!c) + return 0; /* memory fail */ + + end = parse_value(c, skip(value)); + if (!end) { + cJSON_Delete(c); + return 0; + } /* parse failure. ep is set. */ + + /* if we require null-terminated JSON without appended garbage, skip and + * then check for a null terminator */ + if (require_null_terminated) { + end = skip(end); + if (*end) { + cJSON_Delete(c); + ep = end; + return 0; + } + } + if (return_parse_end) + *return_parse_end = end; + return c; +} +/* Default options for cJSON_Parse */ +cJSON *cJSON_Parse(const char *value) { + return cJSON_ParseWithOpts(value, 0, 0); +} + +/* Render a cJSON item/entity/structure to text. 
*/ +char *cJSON_Print(cJSON *item) { return print_value(item, 0, 1, 0); } +char *cJSON_PrintUnformatted(cJSON *item) { return print_value(item, 0, 0, 0); } + +char *cJSON_PrintBuffered(cJSON *item, int prebuffer, int fmt) { + printbuffer p; + p.buffer = (char *)cJSON_malloc(prebuffer); + p.length = prebuffer; + p.offset = 0; + return print_value(item, 0, fmt, &p); + return p.buffer; +} + +/* Parser core - when encountering text, process appropriately. */ +static const char *parse_value(cJSON *item, const char *value) { + if (!value) + return 0; /* Fail on null. */ + if (!strncmp(value, "null", 4)) { + item->type = cJSON_NULL; + return value + 4; + } + if (!strncmp(value, "false", 5)) { + item->type = cJSON_False; + return value + 5; + } + if (!strncmp(value, "true", 4)) { + item->type = cJSON_True; + item->valueint = 1; + return value + 4; + } + if (*value == '\"') { + return parse_string(item, value); + } + if (*value == '-' || (*value >= '0' && *value <= '9')) { + return parse_number(item, value); + } + if (*value == '[') { + return parse_array(item, value); + } + if (*value == '{') { + return parse_object(item, value); + } + + ep = value; + return 0; /* failure. */ +} + +/* Render a value to text. 
*/ +static char *print_value(cJSON *item, int depth, int fmt, printbuffer *p) { + char *out = 0; + if (!item) + return 0; + if (p) { + switch ((item->type) & 255) { + case cJSON_NULL: { + out = ensure(p, 5); + if (out) + strcpy(out, "null"); + break; + } + case cJSON_False: { + out = ensure(p, 6); + if (out) + strcpy(out, "false"); + break; + } + case cJSON_True: { + out = ensure(p, 5); + if (out) + strcpy(out, "true"); + break; + } + case cJSON_Number: + out = print_number(item, p); + break; + case cJSON_String: + out = print_string(item, p); + break; + case cJSON_Array: + out = print_array(item, depth, fmt, p); + break; + case cJSON_Object: + out = print_object(item, depth, fmt, p); + break; + } + } else { + switch ((item->type) & 255) { + case cJSON_NULL: + out = cJSON_strdup("null"); + break; + case cJSON_False: + out = cJSON_strdup("false"); + break; + case cJSON_True: + out = cJSON_strdup("true"); + break; + case cJSON_Number: + out = print_number(item, 0); + break; + case cJSON_String: + out = print_string(item, 0); + break; + case cJSON_Array: + out = print_array(item, depth, fmt, 0); + break; + case cJSON_Object: + out = print_object(item, depth, fmt, 0); + break; + } + } + return out; +} + +/* Build an array from input text. */ +static const char *parse_array(cJSON *item, const char *value) { + cJSON *child; + if (*value != '[') { + ep = value; + return 0; + } /* not an array! */ + + item->type = cJSON_Array; + value = skip(value + 1); + if (*value == ']') + return value + 1; /* empty array. */ + + item->child = child = cJSON_New_Item(); + if (!item->child) + return 0; /* memory fail */ + value = skip( + parse_value(child, skip(value))); /* skip any spacing, get the value. 
*/ + if (!value) + return 0; + + while (*value == ',') { + cJSON *new_item; + if (!(new_item = cJSON_New_Item())) + return 0; /* memory fail */ + child->next = new_item; + new_item->prev = child; + child = new_item; + value = skip(parse_value(child, skip(value + 1))); + if (!value) + return 0; /* memory fail */ + } + + if (*value == ']') + return value + 1; /* end of array */ + ep = value; + return 0; /* malformed. */ +} + +/* Render an array to text */ +static char *print_array(cJSON *item, int depth, int fmt, printbuffer *p) { + char **entries; + char *out = 0, *ptr, *ret; + size_t len = 5; + cJSON *child = item->child; + int numentries = 0, fail = 0, j = 0; + size_t tmplen = 0, i = 0; + + /* How many entries in the array? */ + while (child) + numentries++, child = child->next; + /* Explicitly handle numentries==0 */ + if (!numentries) { + if (p) + out = ensure(p, 3); + else + out = (char *)cJSON_malloc(3); + if (out) + strcpy(out, "[]"); + return out; + } + + if (p) { + /* Compose the output array. */ + i = p->offset; + ptr = ensure(p, 1); + if (!ptr) + return 0; + *ptr = '['; + p->offset++; + child = item->child; + while (child && !fail) { + print_value(child, depth + 1, fmt, p); + p->offset = update(p); + if (child->next) { + len = fmt ? 2 : 1; + ptr = ensure(p, len + 1); + if (!ptr) + return 0; + *ptr++ = ','; + if (fmt) + *ptr++ = ' '; + *ptr = 0; + p->offset += len; + } + child = child->next; + } + ptr = ensure(p, 2); + if (!ptr) + return 0; + *ptr++ = ']'; + *ptr = 0; + out = (p->buffer) + i; + } else { + /* Allocate an array to hold the values for each */ + entries = (char **)cJSON_malloc(numentries * sizeof(char *)); + if (!entries) + return 0; + memset(entries, 0, numentries * sizeof(char *)); + /* Retrieve all the results: */ + child = item->child; + while (child && !fail) { + ret = print_value(child, depth + 1, fmt, 0); + entries[i++] = ret; + if (ret) + len += strlen(ret) + 2 + (fmt ? 
1 : 0); + else + fail = 1; + child = child->next; + } + + /* If we didn't fail, try to malloc the output string */ + if (!fail) + out = (char *)cJSON_malloc(len); + /* If that fails, we fail. */ + if (!out) + fail = 1; + + /* Handle failure. */ + if (fail) { + for (j = 0; j < numentries; j++) + if (entries[j]) + cJSON_free(entries[j]); + cJSON_free(entries); + return 0; + } + + /* Compose the output array. */ + *out = '['; + ptr = out + 1; + *ptr = 0; + for (j = 0; j < numentries; j++) { + tmplen = strlen(entries[j]); + memcpy(ptr, entries[j], tmplen); + ptr += tmplen; + if (j != numentries - 1) { + *ptr++ = ','; + if (fmt) + *ptr++ = ' '; + *ptr = 0; + } + cJSON_free(entries[j]); + } + cJSON_free(entries); + *ptr++ = ']'; + *ptr++ = 0; + } + return out; +} + +/* Build an object from the text. */ +static const char *parse_object(cJSON *item, const char *value) { + cJSON *child; + if (*value != '{') { + ep = value; + return 0; + } /* not an object! */ + + item->type = cJSON_Object; + value = skip(value + 1); + if (*value == '}') + return value + 1; /* empty array. */ + + item->child = child = cJSON_New_Item(); + if (!item->child) + return 0; + value = skip(parse_string(child, skip(value))); + if (!value) + return 0; + child->string = child->valuestring; + child->valuestring = 0; + if (*value != ':') { + ep = value; + return 0; + } /* fail! */ + value = skip(parse_value( + child, skip(value + 1))); /* skip any spacing, get the value. */ + if (!value) + return 0; + + while (*value == ',') { + cJSON *new_item; + if (!(new_item = cJSON_New_Item())) + return 0; /* memory fail */ + child->next = new_item; + new_item->prev = child; + child = new_item; + value = skip(parse_string(child, skip(value + 1))); + if (!value) + return 0; + child->string = child->valuestring; + child->valuestring = 0; + if (*value != ':') { + ep = value; + return 0; + } /* fail! */ + value = skip(parse_value( + child, skip(value + 1))); /* skip any spacing, get the value. 
*/ + if (!value) + return 0; + } + + if (*value == '}') + return value + 1; /* end of array */ + ep = value; + return 0; /* malformed. */ +} + +/* Render an object to text. */ +static char *print_object(cJSON *item, int depth, int fmt, printbuffer *p) { + char **entries = 0, **names = 0; + char *out = 0, *ptr, *ret, *str; + int j; + cJSON *child = item->child; + int numentries = 0, fail = 0, k; + size_t tmplen = 0, i = 0, len = 7; + /* Count the number of entries. */ + while (child) + numentries++, child = child->next; + /* Explicitly handle empty object case */ + if (!numentries) { + if (p) + out = ensure(p, fmt ? depth + 4 : 3); + else + out = (char *)cJSON_malloc(fmt ? depth + 4 : 3); + if (!out) + return 0; + ptr = out; + *ptr++ = '{'; + if (fmt) { + *ptr++ = '\n'; + for (j = 0; j < depth - 1; j++) + *ptr++ = '\t'; + } + *ptr++ = '}'; + *ptr++ = 0; + return out; + } + if (p) { + /* Compose the output: */ + i = p->offset; + len = fmt ? 2 : 1; + ptr = ensure(p, len + 1); + if (!ptr) + return 0; + *ptr++ = '{'; + if (fmt) + *ptr++ = '\n'; + *ptr = 0; + p->offset += len; + child = item->child; + depth++; + while (child) { + if (fmt) { + ptr = ensure(p, depth); + if (!ptr) + return 0; + for (j = 0; j < depth; j++) + *ptr++ = '\t'; + p->offset += depth; + } + print_string_ptr(child->string, p); + p->offset = update(p); + + len = fmt ? 2 : 1; + ptr = ensure(p, len); + if (!ptr) + return 0; + *ptr++ = ':'; + if (fmt) + *ptr++ = '\t'; + p->offset += len; + + print_value(child, depth, fmt, p); + p->offset = update(p); + + len = (fmt ? 1 : 0) + (child->next ? 1 : 0); + ptr = ensure(p, len + 1); + if (!ptr) + return 0; + if (child->next) + *ptr++ = ','; + if (fmt) + *ptr++ = '\n'; + *ptr = 0; + p->offset += len; + child = child->next; + } + ptr = ensure(p, fmt ? 
(depth + 1) : 2); + if (!ptr) + return 0; + if (fmt) + for (j = 0; j < depth - 1; j++) + *ptr++ = '\t'; + *ptr++ = '}'; + *ptr = 0; + out = (p->buffer) + i; + } else { + /* Allocate space for the names and the objects */ + entries = (char **)cJSON_malloc(numentries * sizeof(char *)); + if (!entries) + return 0; + names = (char **)cJSON_malloc(numentries * sizeof(char *)); + if (!names) { + cJSON_free(entries); + return 0; + } + memset(entries, 0, sizeof(char *) * numentries); + memset(names, 0, sizeof(char *) * numentries); + + /* Collect all the results into our arrays: */ + child = item->child; + depth++; + if (fmt) + len += depth; + while (child) { + names[i] = str = print_string_ptr(child->string, 0); + entries[i++] = ret = print_value(child, depth, fmt, 0); + if (str && ret) + len += strlen(ret) + strlen(str) + 2 + (fmt ? 2 + depth : 0); + else + fail = 1; + child = child->next; + } + + /* Try to allocate the output string */ + if (!fail) + out = (char *)cJSON_malloc(len); + if (!out) + fail = 1; + + /* Handle failure */ + if (fail) { + for (j = 0; j < numentries; j++) { + if (names[i]) + cJSON_free(names[j]); + if (entries[j]) + cJSON_free(entries[j]); + } + cJSON_free(names); + cJSON_free(entries); + return 0; + } + + /* Compose the output: */ + *out = '{'; + ptr = out + 1; + if (fmt) + *ptr++ = '\n'; + *ptr = 0; + for (j = 0; j < numentries; j++) { + if (fmt) + for (k = 0; k < depth; k++) + *ptr++ = '\t'; + tmplen = strlen(names[j]); + memcpy(ptr, names[j], tmplen); + ptr += tmplen; + *ptr++ = ':'; + if (fmt) + *ptr++ = '\t'; + strcpy(ptr, entries[j]); + ptr += strlen(entries[j]); + if (j != numentries - 1) + *ptr++ = ','; + if (fmt) + *ptr++ = '\n'; + *ptr = 0; + cJSON_free(names[j]); + cJSON_free(entries[j]); + } + + cJSON_free(names); + cJSON_free(entries); + if (fmt) + for (j = 0; j < depth - 1; j++) + *ptr++ = '\t'; + *ptr++ = '}'; + *ptr++ = 0; + } + return out; +} + +/* Get Array size/item / object item. 
*/ +int cJSON_GetArraySize(cJSON *array) { + cJSON *c = array->child; + int i = 0; + while (c) + i++, c = c->next; + return i; +} +cJSON *cJSON_GetArrayItem(cJSON *array, int item) { + cJSON *c = array->child; + while (c && item > 0) + item--, c = c->next; + return c; +} +cJSON *cJSON_GetObjectItem(cJSON *object, const char *string) { + cJSON *c = object->child; + while (c && strcmp(c->string, string)) + c = c->next; + return c; +} + +/* Utility for array list handling. */ +static void suffix_object(cJSON *prev, cJSON *item) { + prev->next = item; + item->prev = prev; +} +/* Utility for handling references. */ +static cJSON *create_reference(cJSON *item) { + cJSON *ref = cJSON_New_Item(); + if (!ref) + return 0; + memcpy(ref, item, sizeof(cJSON)); + ref->string = 0; + ref->type |= cJSON_IsReference; + ref->next = ref->prev = 0; + return ref; +} + +/* Add item to array/object. */ +void cJSON_AddItemToArray(cJSON *array, cJSON *item) { + cJSON *c = array->child; + if (!item) + return; + if (!c) { + array->child = item; + } else { + while (c && c->next) + c = c->next; + suffix_object(c, item); + } +} +void cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item) { + if (!item) + return; + if (item->string) + cJSON_free(item->string); + item->string = cJSON_strdup(string); + cJSON_AddItemToArray(object, item); +} +void cJSON_AddItemToObjectCS(cJSON *object, const char *string, cJSON *item) { + if (!item) + return; + if (!(item->type & cJSON_StringIsConst) && item->string) + cJSON_free(item->string); + item->string = (char *)string; + item->type |= cJSON_StringIsConst; + cJSON_AddItemToArray(object, item); +} +void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item) { + cJSON_AddItemToArray(array, create_reference(item)); +} +void cJSON_AddItemReferenceToObject(cJSON *object, const char *string, + cJSON *item) { + cJSON_AddItemToObject(object, string, create_reference(item)); +} + +cJSON *cJSON_DetachItemFromArray(cJSON *array, int which) { + cJSON *c 
= array->child; + while (c && which > 0) + c = c->next, which--; + if (!c) + return 0; + if (c->prev) + c->prev->next = c->next; + if (c->next) + c->next->prev = c->prev; + if (c == array->child) + array->child = c->next; + c->prev = c->next = 0; + return c; +} +void cJSON_DeleteItemFromArray(cJSON *array, int which) { + cJSON_Delete(cJSON_DetachItemFromArray(array, which)); +} +cJSON *cJSON_DetachItemFromObject(cJSON *object, const char *string) { + int i = 0; + cJSON *c = object->child; + while (c && strcmp(c->string, string)) + i++, c = c->next; + if (c) + return cJSON_DetachItemFromArray(object, i); + return 0; +} +void cJSON_DeleteItemFromObject(cJSON *object, const char *string) { + cJSON_Delete(cJSON_DetachItemFromObject(object, string)); +} + +/* Replace array/object items with new ones. */ +void cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem) { + cJSON *c = array->child; + while (c && which > 0) + c = c->next, which--; + if (!c) { + cJSON_AddItemToArray(array, newitem); + return; + } + newitem->next = c; + newitem->prev = c->prev; + c->prev = newitem; + if (c == array->child) + array->child = newitem; + else + newitem->prev->next = newitem; +} +void cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem) { + cJSON *c = array->child; + while (c && which > 0) + c = c->next, which--; + if (!c) + return; + newitem->next = c->next; + newitem->prev = c->prev; + if (newitem->next) + newitem->next->prev = newitem; + if (c == array->child) + array->child = newitem; + else + newitem->prev->next = newitem; + c->next = c->prev = 0; + cJSON_Delete(c); +} +void cJSON_ReplaceItemInObject(cJSON *object, const char *string, + cJSON *newitem) { + int i = 0; + cJSON *c = object->child; + while (c && strcmp(c->string, string)) + i++, c = c->next; + if (c) { + newitem->string = cJSON_strdup(string); + cJSON_ReplaceItemInArray(object, i, newitem); + } +} + +/* Create basic types: */ +cJSON *cJSON_CreateNull(void) { + cJSON *item = cJSON_New_Item(); + 
if (item) + item->type = cJSON_NULL; + return item; +} +cJSON *cJSON_CreateTrue(void) { + cJSON *item = cJSON_New_Item(); + if (item) + item->type = cJSON_True; + return item; +} +cJSON *cJSON_CreateFalse(void) { + cJSON *item = cJSON_New_Item(); + if (item) + item->type = cJSON_False; + return item; +} +cJSON *cJSON_CreateBool(int b) { + cJSON *item = cJSON_New_Item(); + if (item) + item->type = b ? cJSON_True : cJSON_False; + return item; +} +cJSON *cJSON_CreateNumber(double num) { + cJSON *item = cJSON_New_Item(); + if (item) { + item->type = cJSON_Number; + item->valuedouble = num; + item->valueint = (int)num; + } + return item; +} +cJSON *cJSON_CreateString(const char *string) { + cJSON *item = cJSON_New_Item(); + if (item) { + item->type = cJSON_String; + item->valuestring = cJSON_strdup(string); + } + return item; +} +cJSON *cJSON_CreateArray(void) { + cJSON *item = cJSON_New_Item(); + if (item) + item->type = cJSON_Array; + return item; +} +cJSON *cJSON_CreateObject(void) { + cJSON *item = cJSON_New_Item(); + if (item) + item->type = cJSON_Object; + return item; +} + +/* Create Arrays: */ +cJSON *cJSON_CreateIntArray(const int *numbers, int count) { + int i; + cJSON *n = 0, *p = 0, *a = cJSON_CreateArray(); + for (i = 0; a && i < count; i++) { + n = cJSON_CreateNumber(numbers[i]); + if (!i) + a->child = n; + else + suffix_object(p, n); + p = n; + } + return a; +} +cJSON *cJSON_CreateFloatArray(const float *numbers, int count) { + int i; + cJSON *n = 0, *p = 0, *a = cJSON_CreateArray(); + for (i = 0; a && i < count; i++) { + n = cJSON_CreateNumber(numbers[i]); + if (!i) + a->child = n; + else + suffix_object(p, n); + p = n; + } + return a; +} +cJSON *cJSON_CreateDoubleArray(const double *numbers, int count) { + int i; + cJSON *n = 0, *p = 0, *a = cJSON_CreateArray(); + for (i = 0; a && i < count; i++) { + n = cJSON_CreateNumber(numbers[i]); + if (!i) + a->child = n; + else + suffix_object(p, n); + p = n; + } + return a; +} +cJSON 
*cJSON_CreateStringArray(const char **strings, int count) { + int i; + cJSON *n = 0, *p = 0, *a = cJSON_CreateArray(); + for (i = 0; a && i < count; i++) { + n = cJSON_CreateString(strings[i]); + if (!i) + a->child = n; + else + suffix_object(p, n); + p = n; + } + return a; +} + +/* Duplication */ +cJSON *cJSON_Duplicate(cJSON *item, int recurse) { + cJSON *newitem, *cptr, *nptr = 0, *newchild; + /* Bail on bad ptr */ + if (!item) + return 0; + /* Create new item */ + newitem = cJSON_New_Item(); + if (!newitem) + return 0; + /* Copy over all vars */ + newitem->type = item->type & (~cJSON_IsReference), + newitem->valueint = item->valueint, + newitem->valuedouble = item->valuedouble; + if (item->valuestring) { + newitem->valuestring = cJSON_strdup(item->valuestring); + if (!newitem->valuestring) { + cJSON_Delete(newitem); + return 0; + } + } + if (item->string) { + newitem->string = cJSON_strdup(item->string); + if (!newitem->string) { + cJSON_Delete(newitem); + return 0; + } + } + /* If non-recursive, then we're done! */ + if (!recurse) + return newitem; + /* Walk the ->next chain for the child. */ + cptr = item->child; + while (cptr) { + newchild = cJSON_Duplicate( + cptr, + 1); /* Duplicate (with recurse) each item in the ->next chain */ + if (!newchild) { + cJSON_Delete(newitem); + return 0; + } + if (nptr) { + nptr->next = newchild, newchild->prev = nptr; + nptr = newchild; + } /* If newitem->child already set, then crosswire ->prev and ->next and + move on */ + else { + newitem->child = newchild; + nptr = newchild; + } /* Set newitem->child and move to it */ + cptr = cptr->next; + } + return newitem; +} + +void cJSON_Minify(char *json) { + char *into = json; + while (*json) { + if (*json == ' ') + json++; + else if (*json == '\t') + json++; /* Whitespace characters. 
*/ + else if (*json == '\r') + json++; + else if (*json == '\n') + json++; + else if (*json == '/' && json[1] == '/') + while (*json && *json != '\n') + json++; /* double-slash comments, to end of line. */ + else if (*json == '/' && json[1] == '*') { + while (*json && !(*json == '*' && json[1] == '/')) + json++; + json += 2; + } /* multiline comments. */ + else if (*json == '\"') { + *into++ = *json++; + while (*json && *json != '\"') { + if (*json == '\\') + *into++ = *json++; + *into++ = *json++; + } + *into++ = *json++; + } /* string literals, which are \" sensitive. */ + else + *into++ = *json++; /* All other characters. */ + } + *into = 0; /* and null-terminate. */ +} diff --git a/third_party/vulkan/loader/cJSON.h b/third_party/vulkan/loader/cJSON.h new file mode 100644 index 000000000..e4c747c12 --- /dev/null +++ b/third_party/vulkan/loader/cJSON.h @@ -0,0 +1,189 @@ +/* + Copyright (c) 2009 Dave Gamble + Copyright (c) 2015-2016 The Khronos Group Inc. + Copyright (c) 2015-2016 Valve Corporation + Copyright (c) 2015-2016 LunarG, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ + +#ifndef cJSON__h +#define cJSON__h + +#ifdef __cplusplus +extern "C" { +#endif + +/* cJSON Types: */ +#define cJSON_False 0 +#define cJSON_True 1 +#define cJSON_NULL 2 +#define cJSON_Number 3 +#define cJSON_String 4 +#define cJSON_Array 5 +#define cJSON_Object 6 + +#define cJSON_IsReference 256 +#define cJSON_StringIsConst 512 + +/* The cJSON structure: */ +typedef struct cJSON { + struct cJSON *next, *prev; /* next/prev allow you to walk array/object + chains. Alternatively, use + GetArraySize/GetArrayItem/GetObjectItem */ + struct cJSON *child; /* An array or object item will have a child pointer + pointing to a chain of the items in the + array/object. */ + + int type; /* The type of the item, as above. */ + + char *valuestring; /* The item's string, if type==cJSON_String */ + int valueint; /* The item's number, if type==cJSON_Number */ + double valuedouble; /* The item's number, if type==cJSON_Number */ + + char * + string; /* The item's name string, if this item is the child of, or is + in the list of subitems of an object. */ +} cJSON; + +typedef struct cJSON_Hooks { + void *(*malloc_fn)(size_t sz); + void (*free_fn)(void *ptr); +} cJSON_Hooks; + +/* Supply malloc, realloc and free functions to cJSON */ +extern void cJSON_InitHooks(cJSON_Hooks *hooks); + +/* Supply a block of JSON, and this returns a cJSON object you can interrogate. + * Call cJSON_Delete when finished. */ +extern cJSON *cJSON_Parse(const char *value); +/* Render a cJSON entity to text for transfer/storage. Free the char* when + * finished. */ +extern char *cJSON_Print(cJSON *item); +/* Render a cJSON entity to text for transfer/storage without any formatting. + * Free the char* when finished. 
*/ +extern char *cJSON_PrintUnformatted(cJSON *item); +/* Render a cJSON entity to text using a buffered strategy. prebuffer is a guess + * at the final size. guessing well reduces reallocation. fmt=0 gives + * unformatted, =1 gives formatted */ +extern char *cJSON_PrintBuffered(cJSON *item, int prebuffer, int fmt); +/* Delete a cJSON entity and all subentities. */ +extern void cJSON_Delete(cJSON *c); + +/* Returns the number of items in an array (or object). */ +extern int cJSON_GetArraySize(cJSON *array); +/* Retrieve item number "item" from array "array". Returns NULL if unsuccessful. + */ +extern cJSON *cJSON_GetArrayItem(cJSON *array, int item); +/* Get item "string" from object. Case insensitive. */ +extern cJSON *cJSON_GetObjectItem(cJSON *object, const char *string); + +/* For analysing failed parses. This returns a pointer to the parse error. + * You'll probably need to look a few chars back to make sense of it. Defined + * when cJSON_Parse() returns 0. 0 when cJSON_Parse() succeeds. */ +extern const char *cJSON_GetErrorPtr(void); + +/* These calls create a cJSON item of the appropriate type. */ +extern cJSON *cJSON_CreateNull(void); +extern cJSON *cJSON_CreateTrue(void); +extern cJSON *cJSON_CreateFalse(void); +extern cJSON *cJSON_CreateBool(int b); +extern cJSON *cJSON_CreateNumber(double num); +extern cJSON *cJSON_CreateString(const char *string); +extern cJSON *cJSON_CreateArray(void); +extern cJSON *cJSON_CreateObject(void); + +/* These utilities create an Array of count items. */ +extern cJSON *cJSON_CreateIntArray(const int *numbers, int count); +extern cJSON *cJSON_CreateFloatArray(const float *numbers, int count); +extern cJSON *cJSON_CreateDoubleArray(const double *numbers, int count); +extern cJSON *cJSON_CreateStringArray(const char **strings, int count); + +/* Append item to the specified array/object. 
*/ +extern void cJSON_AddItemToArray(cJSON *array, cJSON *item); +extern void cJSON_AddItemToObject(cJSON *object, const char *string, + cJSON *item); +extern void cJSON_AddItemToObjectCS( + cJSON *object, const char *string, + cJSON *item); /* Use this when string is definitely const (i.e. a literal, + or as good as), and will definitely survive the cJSON + object */ +/* Append reference to item to the specified array/object. Use this when you + * want to add an existing cJSON to a new cJSON, but don't want to corrupt your + * existing cJSON. */ +extern void cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item); +extern void cJSON_AddItemReferenceToObject(cJSON *object, const char *string, + cJSON *item); + +/* Remove/Detatch items from Arrays/Objects. */ +extern cJSON *cJSON_DetachItemFromArray(cJSON *array, int which); +extern void cJSON_DeleteItemFromArray(cJSON *array, int which); +extern cJSON *cJSON_DetachItemFromObject(cJSON *object, const char *string); +extern void cJSON_DeleteItemFromObject(cJSON *object, const char *string); + +/* Update array items. */ +extern void cJSON_InsertItemInArray( + cJSON *array, int which, + cJSON *newitem); /* Shifts pre-existing items to the right. */ +extern void cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem); +extern void cJSON_ReplaceItemInObject(cJSON *object, const char *string, + cJSON *newitem); + +/* Duplicate a cJSON item */ +extern cJSON *cJSON_Duplicate(cJSON *item, int recurse); +/* Duplicate will create a new, identical cJSON item to the one you pass, in new +memory that will +need to be released. With recurse!=0, it will duplicate any children connected +to the item. +The item->next and ->prev pointers are always zero on return from Duplicate. */ + +/* ParseWithOpts allows you to require (and check) that the JSON is null + * terminated, and to retrieve the pointer to the final byte parsed. 
*/ +extern cJSON *cJSON_ParseWithOpts(const char *value, + const char **return_parse_end, + int require_null_terminated); + +extern void cJSON_Minify(char *json); + +/* Macros for creating things quickly. */ +#define cJSON_AddNullToObject(object, name) \ + cJSON_AddItemToObject(object, name, cJSON_CreateNull()) +#define cJSON_AddTrueToObject(object, name) \ + cJSON_AddItemToObject(object, name, cJSON_CreateTrue()) +#define cJSON_AddFalseToObject(object, name) \ + cJSON_AddItemToObject(object, name, cJSON_CreateFalse()) +#define cJSON_AddBoolToObject(object, name, b) \ + cJSON_AddItemToObject(object, name, cJSON_CreateBool(b)) +#define cJSON_AddNumberToObject(object, name, n) \ + cJSON_AddItemToObject(object, name, cJSON_CreateNumber(n)) +#define cJSON_AddStringToObject(object, name, s) \ + cJSON_AddItemToObject(object, name, cJSON_CreateString(s)) + +/* When assigning an integer value, it needs to be propagated to valuedouble + * too. */ +#define cJSON_SetIntValue(object, val) \ + ((object) ? (object)->valueint = (object)->valuedouble = (val) : (val)) +#define cJSON_SetNumberValue(object, val) \ + ((object) ? (object)->valueint = (object)->valuedouble = (val) : (val)) + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/third_party/vulkan/loader/debug_report.c b/third_party/vulkan/loader/debug_report.c new file mode 100644 index 000000000..232fa6d6b --- /dev/null +++ b/third_party/vulkan/loader/debug_report.c @@ -0,0 +1,319 @@ +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * Copyright (C) 2015-2016 Google Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Courtney Goeltzenleuchter + * Author: Jon Ashburn + * + */ + +#define _GNU_SOURCE +#include +#include +#include +#include +#ifndef WIN32 +#include +#else +#endif +#include "vk_loader_platform.h" +#include "debug_report.h" +#include "vulkan/vk_layer.h" + +typedef void(VKAPI_PTR *PFN_stringCallback)(char *message); + +static const VkExtensionProperties debug_report_extension_info = { + .extensionName = VK_EXT_DEBUG_REPORT_EXTENSION_NAME, + .specVersion = VK_EXT_DEBUG_REPORT_SPEC_VERSION, +}; + +void debug_report_add_instance_extensions( + const struct loader_instance *inst, + struct loader_extension_list *ext_list) { + loader_add_to_ext_list(inst, ext_list, 1, &debug_report_extension_info); +} + +void debug_report_create_instance(struct loader_instance *ptr_instance, + const VkInstanceCreateInfo *pCreateInfo) { + ptr_instance->debug_report_enabled = false; + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) { + ptr_instance->debug_report_enabled = true; + return; + } + } +} + +VkResult +util_CreateDebugReportCallback(struct loader_instance *inst, + VkDebugReportCallbackCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDebugReportCallbackEXT callback) { + VkLayerDbgFunctionNode *pNewDbgFuncNode; + if (pAllocator != NULL) { + pNewDbgFuncNode = (VkLayerDbgFunctionNode *)pAllocator->pfnAllocation( + pAllocator->pUserData, sizeof(VkLayerDbgFunctionNode), + sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + } else { + pNewDbgFuncNode = (VkLayerDbgFunctionNode *)loader_heap_alloc( + inst, sizeof(VkLayerDbgFunctionNode), + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT); + } + if (!pNewDbgFuncNode) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + pNewDbgFuncNode->msgCallback = callback; + pNewDbgFuncNode->pfnMsgCallback = pCreateInfo->pfnCallback; + pNewDbgFuncNode->msgFlags = pCreateInfo->flags; + pNewDbgFuncNode->pUserData = 
pCreateInfo->pUserData; + pNewDbgFuncNode->pNext = inst->DbgFunctionHead; + inst->DbgFunctionHead = pNewDbgFuncNode; + + return VK_SUCCESS; +} + +static VKAPI_ATTR VkResult VKAPI_CALL debug_report_CreateDebugReportCallback( + VkInstance instance, VkDebugReportCallbackCreateInfoEXT *pCreateInfo, + VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pCallback) { + struct loader_instance *inst = loader_get_instance(instance); + loader_platform_thread_lock_mutex(&loader_lock); + VkResult result = inst->disp->CreateDebugReportCallbackEXT( + instance, pCreateInfo, pAllocator, pCallback); + if (result == VK_SUCCESS) { + result = util_CreateDebugReportCallback(inst, pCreateInfo, pAllocator, + *pCallback); + } + loader_platform_thread_unlock_mutex(&loader_lock); + return result; +} + +// Utility function to handle reporting +VkBool32 util_DebugReportMessage(const struct loader_instance *inst, + VkFlags msgFlags, + VkDebugReportObjectTypeEXT objectType, + uint64_t srcObject, size_t location, + int32_t msgCode, const char *pLayerPrefix, + const char *pMsg) { + VkBool32 bail = false; + VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead; + while (pTrav) { + if (pTrav->msgFlags & msgFlags) { + if (pTrav->pfnMsgCallback(msgFlags, objectType, srcObject, location, + msgCode, pLayerPrefix, pMsg, + pTrav->pUserData)) { + bail = true; + } + } + pTrav = pTrav->pNext; + } + + return bail; +} + +void util_DestroyDebugReportCallback(struct loader_instance *inst, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks *pAllocator) { + VkLayerDbgFunctionNode *pTrav = inst->DbgFunctionHead; + VkLayerDbgFunctionNode *pPrev = pTrav; + + while (pTrav) { + if (pTrav->msgCallback == callback) { + pPrev->pNext = pTrav->pNext; + if (inst->DbgFunctionHead == pTrav) + inst->DbgFunctionHead = pTrav->pNext; + if (pAllocator != NULL) { + pAllocator->pfnFree(pAllocator->pUserData, pTrav); + } else { + loader_heap_free(inst, pTrav); + } + break; + } + pPrev = pTrav; + pTrav = 
pTrav->pNext; + } +} + +static VKAPI_ATTR void VKAPI_CALL +debug_report_DestroyDebugReportCallback(VkInstance instance, + VkDebugReportCallbackEXT callback, + VkAllocationCallbacks *pAllocator) { + struct loader_instance *inst = loader_get_instance(instance); + loader_platform_thread_lock_mutex(&loader_lock); + + inst->disp->DestroyDebugReportCallbackEXT(instance, callback, pAllocator); + + util_DestroyDebugReportCallback(inst, callback, pAllocator); + + loader_platform_thread_unlock_mutex(&loader_lock); +} + +static VKAPI_ATTR void VKAPI_CALL debug_report_DebugReportMessage( + VkInstance instance, VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objType, uint64_t object, size_t location, + int32_t msgCode, const char *pLayerPrefix, const char *pMsg) { + struct loader_instance *inst = loader_get_instance(instance); + + inst->disp->DebugReportMessageEXT(instance, flags, objType, object, + location, msgCode, pLayerPrefix, pMsg); +} + +/* + * This is the instance chain terminator function + * for CreateDebugReportCallback + */ + +VKAPI_ATTR VkResult VKAPI_CALL loader_CreateDebugReportCallback( + VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDebugReportCallbackEXT *pCallback) { + VkDebugReportCallbackEXT *icd_info; + const struct loader_icd *icd; + struct loader_instance *inst = (struct loader_instance *)instance; + VkResult res; + uint32_t storage_idx; + + icd_info = calloc(sizeof(VkDebugReportCallbackEXT), inst->total_icd_count); + if (!icd_info) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + storage_idx = 0; + for (icd = inst->icds; icd; icd = icd->next) { + if (!icd->CreateDebugReportCallbackEXT) { + continue; + } + + res = icd->CreateDebugReportCallbackEXT( + icd->instance, pCreateInfo, pAllocator, &icd_info[storage_idx]); + + if (res != VK_SUCCESS) { + break; + } + storage_idx++; + } + + /* roll back on errors */ + if (icd) { + storage_idx = 0; + for (icd = inst->icds; icd; icd 
= icd->next) { + if (icd_info[storage_idx]) { + icd->DestroyDebugReportCallbackEXT( + icd->instance, icd_info[storage_idx], pAllocator); + } + storage_idx++; + } + + return res; + } + + *(VkDebugReportCallbackEXT **)pCallback = icd_info; + + return VK_SUCCESS; +} + +/* + * This is the instance chain terminator function + * for DestroyDebugReportCallback + */ +VKAPI_ATTR void VKAPI_CALL +loader_DestroyDebugReportCallback(VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks *pAllocator) { + uint32_t storage_idx; + VkDebugReportCallbackEXT *icd_info; + const struct loader_icd *icd; + + struct loader_instance *inst = (struct loader_instance *)instance; + icd_info = *(VkDebugReportCallbackEXT **)&callback; + storage_idx = 0; + for (icd = inst->icds; icd; icd = icd->next) { + if (icd_info[storage_idx]) { + icd->DestroyDebugReportCallbackEXT( + icd->instance, icd_info[storage_idx], pAllocator); + } + storage_idx++; + } +} + +/* + * This is the instance chain terminator function + * for DebugReportMessage + */ +VKAPI_ATTR void VKAPI_CALL +loader_DebugReportMessage(VkInstance instance, VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objType, uint64_t object, + size_t location, int32_t msgCode, + const char *pLayerPrefix, const char *pMsg) { + const struct loader_icd *icd; + + struct loader_instance *inst = (struct loader_instance *)instance; + + loader_platform_thread_lock_mutex(&loader_lock); + for (icd = inst->icds; icd; icd = icd->next) { + if (icd->DebugReportMessageEXT != NULL) { + icd->DebugReportMessageEXT(icd->instance, flags, objType, object, + location, msgCode, pLayerPrefix, pMsg); + } + } + + /* + * Now that all ICDs have seen the message, call the necessary callbacks. + * Ignoring "bail" return value as there is nothing to bail from at this + * point. 
+ */ + + util_DebugReportMessage(inst, flags, objType, object, location, msgCode, + pLayerPrefix, pMsg); + + loader_platform_thread_unlock_mutex(&loader_lock); +} + +bool debug_report_instance_gpa(struct loader_instance *ptr_instance, + const char *name, void **addr) { + // debug_report is currently advertised to be supported by the loader, + // so always return the entry points if name matches and it's enabled + *addr = NULL; + + if (!strcmp("vkCreateDebugReportCallbackEXT", name)) { + *addr = ptr_instance->debug_report_enabled + ? (void *)debug_report_CreateDebugReportCallback + : NULL; + return true; + } + if (!strcmp("vkDestroyDebugReportCallbackEXT", name)) { + *addr = ptr_instance->debug_report_enabled + ? (void *)debug_report_DestroyDebugReportCallback + : NULL; + return true; + } + if (!strcmp("vkDebugReportMessageEXT", name)) { + *addr = ptr_instance->debug_report_enabled + ? (void *)debug_report_DebugReportMessage + : NULL; + return true; + } + return false; +} diff --git a/third_party/vulkan/loader/debug_report.h b/third_party/vulkan/loader/debug_report.h new file mode 100644 index 000000000..7b665a5f3 --- /dev/null +++ b/third_party/vulkan/loader/debug_report.h @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * Copyright (C) 2015-2016 Google Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + * + * Author: Courtney Goeltzenleuchter + * Author: Jon Ashburn + * + */ + +#include "vk_loader_platform.h" +#include "loader.h" +/* + * CreateMsgCallback is global and needs to be + * applied to all layers and ICDs. + * What happens if a layer is enabled on both the instance chain + * as well as the device chain and a call to CreateMsgCallback is made? + * Do we need to make sure that each layer / driver only gets called once? + * Should a layer implementing support for CreateMsgCallback only be allowed (?) + * to live on one chain? Or maybe make it the application's responsibility. + * If the app enables DRAW_STATE on at both CreateInstance time and CreateDevice + * time, CreateMsgCallback will call the DRAW_STATE layer twice. Once via + * the instance chain and once via the device chain. 
+ * The loader should only return the DEBUG_REPORT extension as supported + * for the GetGlobalExtensionSupport call. That should help eliminate one + * duplication. + * Since the instance chain requires us iterating over the available ICDs + * and each ICD will have it's own unique MsgCallback object we need to + * track those objects to give back the right one. + * This also implies that the loader has to intercept vkDestroyObject and + * if the extension is enabled and the object type is a MsgCallback then + * we must translate the object into the proper ICD specific ones. + * DestroyObject works on a device chain. Should not be what's destroying + * the MsgCallback object. That needs to be an instance thing. So, since + * we used an instance to create it, we need a custom Destroy that also + * takes an instance. That way we can iterate over the ICDs properly. + * Example use: + * CreateInstance: DEBUG_REPORT + * Loader will create instance chain with enabled extensions. + * TODO: Should validation layers be enabled here? If not, they will not be in + * the instance chain. + * fn = GetProcAddr(INSTANCE, "vkCreateMsgCallback") -> point to loader's + * vkCreateMsgCallback + * App creates a callback object: fn(..., &MsgCallbackObject1) + * Have only established the instance chain so far. Loader will call the + * instance chain. + * Each layer in the instance chain will call down to the next layer, + * terminating with + * the CreateMsgCallback loader terminator function that creates the actual + * MsgCallbackObject1 object. + * The loader CreateMsgCallback terminator will iterate over the ICDs. + * Calling each ICD that supports vkCreateMsgCallback and collect answers in + * icd_msg_callback_map here. + * As result is sent back up the chain each layer has opportunity to record the + * callback operation and + * appropriate MsgCallback object. + * ... 
+ * Any reports matching the flags set in MsgCallbackObject1 will generate the + * defined callback behavior + * in the layer / ICD that initiated that report. + * ... + * CreateDevice: MemTracker:... + * App does not include DEBUG_REPORT as that is a global extension. + * TODO: GetExtensionSupport must not report DEBUG_REPORT when using instance. + * App MUST include any desired validation layers or they will not participate + * in the device call chain. + * App creates a callback object: fn(..., &MsgCallbackObject2) + * Loader's vkCreateMsgCallback is called. + * Loader sends call down instance chain - this is a global extension - any + * validation layer that was + * enabled at CreateInstance will be able to register the callback. Loader will + * iterate over the ICDs and + * will record the ICD's version of the MsgCallback2 object here. + * ... + * Any report will go to the layer's report function and it will check the flags + * for MsgCallbackObject1 + * and MsgCallbackObject2 and take the appropriate action as indicated by the + * app. + * ... + * App calls vkDestroyMsgCallback( MsgCallbackObject1 ) + * Loader's DestroyMsgCallback is where call starts. DestroyMsgCallback will be + * sent down instance chain + * ending in the loader's DestroyMsgCallback terminator which will iterate over + * the ICD's destroying each + * ICD version of that MsgCallback object and then destroy the loader's version + * of the object. + * Any reports generated after this will only have MsgCallbackObject2 available. 
+ */ + +void debug_report_add_instance_extensions( + const struct loader_instance *inst, struct loader_extension_list *ext_list); + +void debug_report_create_instance(struct loader_instance *ptr_instance, + const VkInstanceCreateInfo *pCreateInfo); + +bool debug_report_instance_gpa(struct loader_instance *ptr_instance, + const char *name, void **addr); + +VKAPI_ATTR VkResult VKAPI_CALL loader_CreateDebugReportCallback( + VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDebugReportCallbackEXT *pCallback); + +VKAPI_ATTR void VKAPI_CALL +loader_DestroyDebugReportCallback(VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks *pAllocator); + +VKAPI_ATTR void VKAPI_CALL +loader_DebugReportMessage(VkInstance instance, VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objType, uint64_t object, + size_t location, int32_t msgCode, + const char *pLayerPrefix, const char *pMsg); + +VkResult +util_CreateDebugReportCallback(struct loader_instance *inst, + VkDebugReportCallbackCreateInfoEXT *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDebugReportCallbackEXT callback); + +void util_DestroyDebugReportCallback(struct loader_instance *inst, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks *pAllocator); + +VkBool32 util_DebugReportMessage(const struct loader_instance *inst, + VkFlags msgFlags, + VkDebugReportObjectTypeEXT objectType, + uint64_t srcObject, size_t location, + int32_t msgCode, const char *pLayerPrefix, + const char *pMsg); diff --git a/third_party/vulkan/loader/dev_ext_trampoline.c b/third_party/vulkan/loader/dev_ext_trampoline.c new file mode 100644 index 000000000..b752086b5 --- /dev/null +++ b/third_party/vulkan/loader/dev_ext_trampoline.c @@ -0,0 +1,2038 @@ +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Jon Ashburn + */ + +#include "vk_loader_platform.h" +#include "loader.h" +#if defined(__linux__) +#pragma GCC optimize(3) // force gcc to use tail-calls +#endif + +VKAPI_ATTR void VKAPI_CALL vkDevExt0(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[0](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt1(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[1](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt2(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[2](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt3(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[3](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt4(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[4](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt5(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[5](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt6(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[6](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt7(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[7](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt8(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[8](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt9(VkDevice device) { + const struct 
loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[9](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt10(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[10](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt11(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[11](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt12(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[12](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt13(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[13](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt14(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[14](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt15(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[15](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt16(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[16](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt17(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[17](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt18(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[18](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt19(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = 
loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[19](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt20(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[20](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt21(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[21](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt22(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[22](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt23(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[23](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt24(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[24](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt25(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[25](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt26(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[26](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt27(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[27](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt28(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[28](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt29(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + 
disp->ext_dispatch.DevExt[29](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt30(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[30](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt31(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[31](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt32(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[32](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt33(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[33](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt34(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[34](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt35(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[35](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt36(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[36](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt37(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[37](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt38(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[38](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt39(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[39](device); +} + +VKAPI_ATTR 
void VKAPI_CALL vkDevExt40(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[40](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt41(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[41](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt42(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[42](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt43(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[43](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt44(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[44](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt45(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[45](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt46(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[46](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt47(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[47](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt48(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[48](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt49(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[49](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt50(VkDevice device) { + const 
struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[50](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt51(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[51](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt52(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[52](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt53(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[53](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt54(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[54](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt55(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[55](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt56(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[56](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt57(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[57](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt58(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[58](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt59(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[59](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt60(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = 
loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[60](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt61(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[61](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt62(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[62](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt63(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[63](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt64(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[64](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt65(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[65](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt66(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[66](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt67(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[67](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt68(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[68](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt69(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[69](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt70(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + 
disp->ext_dispatch.DevExt[70](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt71(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[71](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt72(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[72](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt73(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[73](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt74(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[74](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt75(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[75](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt76(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[76](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt77(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[77](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt78(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[78](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt79(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[79](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt80(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[80](device); +} + +VKAPI_ATTR 
void VKAPI_CALL vkDevExt81(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[81](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt82(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[82](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt83(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[83](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt84(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[84](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt85(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[85](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt86(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[86](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt87(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[87](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt88(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[88](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt89(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[89](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt90(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[90](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt91(VkDevice device) { + const 
struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[91](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt92(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[92](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt93(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[93](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt94(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[94](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt95(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[95](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt96(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[96](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt97(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[97](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt98(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[98](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt99(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[99](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt100(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[100](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt101(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = 
loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[101](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt102(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[102](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt103(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[103](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt104(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[104](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt105(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[105](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt106(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[106](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt107(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[107](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt108(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[108](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt109(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[109](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt110(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[110](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt111(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + 
disp->ext_dispatch.DevExt[111](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt112(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[112](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt113(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[113](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt114(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[114](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt115(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[115](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt116(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[116](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt117(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[117](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt118(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[118](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt119(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[119](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt120(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[120](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt121(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[121](device); 
+} + +VKAPI_ATTR void VKAPI_CALL vkDevExt122(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[122](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt123(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[123](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt124(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[124](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt125(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[125](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt126(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[126](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt127(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[127](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt128(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[128](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt129(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[129](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt130(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[130](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt131(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[131](device); +} + +VKAPI_ATTR void VKAPI_CALL 
vkDevExt132(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[132](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt133(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[133](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt134(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[134](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt135(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[135](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt136(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[136](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt137(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[137](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt138(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[138](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt139(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[139](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt140(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[140](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt141(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[141](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt142(VkDevice device) { + const 
struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[142](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt143(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[143](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt144(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[144](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt145(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[145](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt146(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[146](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt147(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[147](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt148(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[148](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt149(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[149](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt150(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[150](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt151(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[151](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt152(VkDevice device) { + const struct loader_dev_dispatch_table *disp; 
+ disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[152](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt153(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[153](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt154(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[154](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt155(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[155](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt156(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[156](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt157(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[157](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt158(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[158](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt159(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[159](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt160(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[160](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt161(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[161](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt162(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = 
loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[162](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt163(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[163](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt164(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[164](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt165(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[165](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt166(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[166](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt167(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[167](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt168(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[168](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt169(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[169](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt170(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[170](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt171(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[171](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt172(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + 
disp->ext_dispatch.DevExt[172](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt173(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[173](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt174(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[174](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt175(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[175](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt176(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[176](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt177(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[177](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt178(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[178](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt179(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[179](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt180(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[180](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt181(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[181](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt182(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[182](device); 
+} + +VKAPI_ATTR void VKAPI_CALL vkDevExt183(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[183](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt184(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[184](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt185(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[185](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt186(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[186](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt187(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[187](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt188(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[188](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt189(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[189](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt190(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[190](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt191(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[191](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt192(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[192](device); +} + +VKAPI_ATTR void VKAPI_CALL 
vkDevExt193(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[193](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt194(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[194](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt195(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[195](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt196(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[196](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt197(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[197](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt198(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[198](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt199(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[199](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt200(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[200](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt201(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[201](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt202(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[202](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt203(VkDevice device) { + const 
struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[203](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt204(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[204](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt205(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[205](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt206(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[206](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt207(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[207](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt208(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[208](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt209(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[209](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt210(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[210](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt211(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[211](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt212(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[212](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt213(VkDevice device) { + const struct loader_dev_dispatch_table *disp; 
+ disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[213](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt214(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[214](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt215(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[215](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt216(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[216](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt217(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[217](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt218(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[218](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt219(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[219](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt220(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[220](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt221(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[221](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt222(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[222](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt223(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = 
loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[223](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt224(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[224](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt225(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[225](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt226(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[226](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt227(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[227](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt228(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[228](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt229(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[229](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt230(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[230](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt231(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[231](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt232(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[232](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt233(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + 
disp->ext_dispatch.DevExt[233](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt234(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[234](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt235(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[235](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt236(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[236](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt237(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[237](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt238(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[238](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt239(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[239](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt240(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[240](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt241(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[241](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt242(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[242](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt243(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[243](device); 
+} + +VKAPI_ATTR void VKAPI_CALL vkDevExt244(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[244](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt245(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[245](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt246(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[246](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt247(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[247](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt248(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[248](device); +} + +VKAPI_ATTR void VKAPI_CALL vkDevExt249(VkDevice device) { + const struct loader_dev_dispatch_table *disp; + disp = loader_get_dev_dispatch(device); + disp->ext_dispatch.DevExt[249](device); +} + +void *loader_get_dev_ext_trampoline(uint32_t index) { + switch (index) { + case 0: + return vkDevExt0; + case 1: + return vkDevExt1; + case 2: + return vkDevExt2; + case 3: + return vkDevExt3; + case 4: + return vkDevExt4; + case 5: + return vkDevExt5; + case 6: + return vkDevExt6; + case 7: + return vkDevExt7; + case 8: + return vkDevExt8; + case 9: + return vkDevExt9; + case 10: + return vkDevExt10; + case 11: + return vkDevExt11; + case 12: + return vkDevExt12; + case 13: + return vkDevExt13; + case 14: + return vkDevExt14; + case 15: + return vkDevExt15; + case 16: + return vkDevExt16; + case 17: + return vkDevExt17; + case 18: + return vkDevExt18; + case 19: + return vkDevExt19; + case 20: + return vkDevExt20; + case 21: + return vkDevExt21; + case 22: + return vkDevExt22; + case 23: + return 
vkDevExt23; + case 24: + return vkDevExt24; + case 25: + return vkDevExt25; + case 26: + return vkDevExt26; + case 27: + return vkDevExt27; + case 28: + return vkDevExt28; + case 29: + return vkDevExt29; + case 30: + return vkDevExt30; + case 31: + return vkDevExt31; + case 32: + return vkDevExt32; + case 33: + return vkDevExt33; + case 34: + return vkDevExt34; + case 35: + return vkDevExt35; + case 36: + return vkDevExt36; + case 37: + return vkDevExt37; + case 38: + return vkDevExt38; + case 39: + return vkDevExt39; + case 40: + return vkDevExt40; + case 41: + return vkDevExt41; + case 42: + return vkDevExt42; + case 43: + return vkDevExt43; + case 44: + return vkDevExt44; + case 45: + return vkDevExt45; + case 46: + return vkDevExt46; + case 47: + return vkDevExt47; + case 48: + return vkDevExt48; + case 49: + return vkDevExt49; + case 50: + return vkDevExt50; + case 51: + return vkDevExt51; + case 52: + return vkDevExt52; + case 53: + return vkDevExt53; + case 54: + return vkDevExt54; + case 55: + return vkDevExt55; + case 56: + return vkDevExt56; + case 57: + return vkDevExt57; + case 58: + return vkDevExt58; + case 59: + return vkDevExt59; + case 60: + return vkDevExt60; + case 61: + return vkDevExt61; + case 62: + return vkDevExt62; + case 63: + return vkDevExt63; + case 64: + return vkDevExt64; + case 65: + return vkDevExt65; + case 66: + return vkDevExt66; + case 67: + return vkDevExt67; + case 68: + return vkDevExt68; + case 69: + return vkDevExt69; + case 70: + return vkDevExt70; + case 71: + return vkDevExt71; + case 72: + return vkDevExt72; + case 73: + return vkDevExt73; + case 74: + return vkDevExt74; + case 75: + return vkDevExt75; + case 76: + return vkDevExt76; + case 77: + return vkDevExt77; + case 78: + return vkDevExt78; + case 79: + return vkDevExt79; + case 80: + return vkDevExt80; + case 81: + return vkDevExt81; + case 82: + return vkDevExt82; + case 83: + return vkDevExt83; + case 84: + return vkDevExt84; + case 85: + return vkDevExt85; + 
case 86: + return vkDevExt86; + case 87: + return vkDevExt87; + case 88: + return vkDevExt88; + case 89: + return vkDevExt89; + case 90: + return vkDevExt90; + case 91: + return vkDevExt91; + case 92: + return vkDevExt92; + case 93: + return vkDevExt93; + case 94: + return vkDevExt94; + case 95: + return vkDevExt95; + case 96: + return vkDevExt96; + case 97: + return vkDevExt97; + case 98: + return vkDevExt98; + case 99: + return vkDevExt99; + case 100: + return vkDevExt100; + case 101: + return vkDevExt101; + case 102: + return vkDevExt102; + case 103: + return vkDevExt103; + case 104: + return vkDevExt104; + case 105: + return vkDevExt105; + case 106: + return vkDevExt106; + case 107: + return vkDevExt107; + case 108: + return vkDevExt108; + case 109: + return vkDevExt109; + case 110: + return vkDevExt110; + case 111: + return vkDevExt111; + case 112: + return vkDevExt112; + case 113: + return vkDevExt113; + case 114: + return vkDevExt114; + case 115: + return vkDevExt115; + case 116: + return vkDevExt116; + case 117: + return vkDevExt117; + case 118: + return vkDevExt118; + case 119: + return vkDevExt119; + case 120: + return vkDevExt120; + case 121: + return vkDevExt121; + case 122: + return vkDevExt122; + case 123: + return vkDevExt123; + case 124: + return vkDevExt124; + case 125: + return vkDevExt125; + case 126: + return vkDevExt126; + case 127: + return vkDevExt127; + case 128: + return vkDevExt128; + case 129: + return vkDevExt129; + case 130: + return vkDevExt130; + case 131: + return vkDevExt131; + case 132: + return vkDevExt132; + case 133: + return vkDevExt133; + case 134: + return vkDevExt134; + case 135: + return vkDevExt135; + case 136: + return vkDevExt136; + case 137: + return vkDevExt137; + case 138: + return vkDevExt138; + case 139: + return vkDevExt139; + case 140: + return vkDevExt140; + case 141: + return vkDevExt141; + case 142: + return vkDevExt142; + case 143: + return vkDevExt143; + case 144: + return vkDevExt144; + case 145: + return 
vkDevExt145; + case 146: + return vkDevExt146; + case 147: + return vkDevExt147; + case 148: + return vkDevExt148; + case 149: + return vkDevExt149; + case 150: + return vkDevExt150; + case 151: + return vkDevExt151; + case 152: + return vkDevExt152; + case 153: + return vkDevExt153; + case 154: + return vkDevExt154; + case 155: + return vkDevExt155; + case 156: + return vkDevExt156; + case 157: + return vkDevExt157; + case 158: + return vkDevExt158; + case 159: + return vkDevExt159; + case 160: + return vkDevExt160; + case 161: + return vkDevExt161; + case 162: + return vkDevExt162; + case 163: + return vkDevExt163; + case 164: + return vkDevExt164; + case 165: + return vkDevExt165; + case 166: + return vkDevExt166; + case 167: + return vkDevExt167; + case 168: + return vkDevExt168; + case 169: + return vkDevExt169; + case 170: + return vkDevExt170; + case 171: + return vkDevExt171; + case 172: + return vkDevExt172; + case 173: + return vkDevExt173; + case 174: + return vkDevExt174; + case 175: + return vkDevExt175; + case 176: + return vkDevExt176; + case 177: + return vkDevExt177; + case 178: + return vkDevExt178; + case 179: + return vkDevExt179; + case 180: + return vkDevExt180; + case 181: + return vkDevExt181; + case 182: + return vkDevExt182; + case 183: + return vkDevExt183; + case 184: + return vkDevExt184; + case 185: + return vkDevExt185; + case 186: + return vkDevExt186; + case 187: + return vkDevExt187; + case 188: + return vkDevExt188; + case 189: + return vkDevExt189; + case 190: + return vkDevExt190; + case 191: + return vkDevExt191; + case 192: + return vkDevExt192; + case 193: + return vkDevExt193; + case 194: + return vkDevExt194; + case 195: + return vkDevExt195; + case 196: + return vkDevExt196; + case 197: + return vkDevExt197; + case 198: + return vkDevExt198; + case 199: + return vkDevExt199; + case 200: + return vkDevExt200; + case 201: + return vkDevExt201; + case 202: + return vkDevExt202; + case 203: + return vkDevExt203; + case 204: + 
return vkDevExt204; + case 205: + return vkDevExt205; + case 206: + return vkDevExt206; + case 207: + return vkDevExt207; + case 208: + return vkDevExt208; + case 209: + return vkDevExt209; + case 210: + return vkDevExt210; + case 211: + return vkDevExt211; + case 212: + return vkDevExt212; + case 213: + return vkDevExt213; + case 214: + return vkDevExt214; + case 215: + return vkDevExt215; + case 216: + return vkDevExt216; + case 217: + return vkDevExt217; + case 218: + return vkDevExt218; + case 219: + return vkDevExt219; + case 220: + return vkDevExt220; + case 221: + return vkDevExt221; + case 222: + return vkDevExt222; + case 223: + return vkDevExt223; + case 224: + return vkDevExt224; + case 225: + return vkDevExt225; + case 226: + return vkDevExt226; + case 227: + return vkDevExt227; + case 228: + return vkDevExt228; + case 229: + return vkDevExt229; + case 230: + return vkDevExt230; + case 231: + return vkDevExt231; + case 232: + return vkDevExt232; + case 233: + return vkDevExt233; + case 234: + return vkDevExt234; + case 235: + return vkDevExt235; + case 236: + return vkDevExt236; + case 237: + return vkDevExt237; + case 238: + return vkDevExt238; + case 239: + return vkDevExt239; + case 240: + return vkDevExt240; + case 241: + return vkDevExt241; + case 242: + return vkDevExt242; + case 243: + return vkDevExt243; + case 244: + return vkDevExt244; + case 245: + return vkDevExt245; + case 246: + return vkDevExt246; + case 247: + return vkDevExt247; + case 248: + return vkDevExt248; + case 249: + return vkDevExt249; + } + return NULL; +} diff --git a/third_party/vulkan/loader/dirent_on_windows.c b/third_party/vulkan/loader/dirent_on_windows.c new file mode 100644 index 000000000..985fb6a1a --- /dev/null +++ b/third_party/vulkan/loader/dirent_on_windows.c @@ -0,0 +1,130 @@ +/* + + Implementation of POSIX directory browsing functions and types for Win32. + + Author: Kevlin Henney (kevlin@acm.org, kevlin@curbralan.com) + History: Created March 1997. 
Updated June 2003 and July 2012. + Rights: See end of file. + +*/ +#include +#include +#include /* _findfirst and _findnext set errno iff they return -1 */ +#include +#include +#include "vk_loader_platform.h" +#include "loader.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef ptrdiff_t handle_type; /* C99's intptr_t not sufficiently portable */ + +struct DIR { + handle_type handle; /* -1 for failed rewind */ + struct _finddata_t info; + struct dirent result; /* d_name null iff first time */ + char *name; /* null-terminated char string */ +}; + +DIR *opendir(const char *name) { + DIR *dir = 0; + + if (name && name[0]) { + size_t base_length = strlen(name); + const char *all = /* search pattern must end with suitable wildcard */ + strchr("/\\", name[base_length - 1]) ? "*" : "/*"; + + if ((dir = (DIR *)loader_tls_heap_alloc(sizeof *dir)) != 0 && + (dir->name = (char *)loader_tls_heap_alloc(base_length + + strlen(all) + 1)) != 0) { + strcat(strcpy(dir->name, name), all); + + if ((dir->handle = + (handle_type)_findfirst(dir->name, &dir->info)) != -1) { + dir->result.d_name = 0; + } else /* rollback */ + { + loader_tls_heap_free(dir->name); + loader_tls_heap_free(dir); + dir = 0; + } + } else /* rollback */ + { + loader_tls_heap_free(dir); + dir = 0; + errno = ENOMEM; + } + } else { + errno = EINVAL; + } + + return dir; +} + +int closedir(DIR *dir) { + int result = -1; + + if (dir) { + if (dir->handle != -1) { + result = _findclose(dir->handle); + } + + loader_tls_heap_free(dir->name); + loader_tls_heap_free(dir); + } + + if (result == -1) /* map all errors to EBADF */ + { + errno = EBADF; + } + + return result; +} + +struct dirent *readdir(DIR *dir) { + struct dirent *result = 0; + + if (dir && dir->handle != -1) { + if (!dir->result.d_name || _findnext(dir->handle, &dir->info) != -1) { + result = &dir->result; + result->d_name = dir->info.name; + } + } else { + errno = EBADF; + } + + return result; +} + +void rewinddir(DIR *dir) { + if (dir && dir->handle != 
-1) { + _findclose(dir->handle); + dir->handle = (handle_type)_findfirst(dir->name, &dir->info); + dir->result.d_name = 0; + } else { + errno = EBADF; + } +} + +#ifdef __cplusplus +} +#endif + +/* + + Copyright Kevlin Henney, 1997, 2003, 2012. All rights reserved. + Copyright (c) 2015 The Khronos Group Inc. + Copyright (c) 2015 Valve Corporation + Copyright (c) 2015 LunarG, Inc. + Permission to use, copy, modify, and distribute this software and its + documentation for any purpose is hereby granted without fee, provided + that this copyright and permissions notice appear in all copies and + derivatives. + + This software is supplied "as is" without express or implied warranty. + + But that said, if there are any problems please get in touch. + +*/ diff --git a/third_party/vulkan/loader/dirent_on_windows.h b/third_party/vulkan/loader/dirent_on_windows.h new file mode 100644 index 000000000..8600f8ef0 --- /dev/null +++ b/third_party/vulkan/loader/dirent_on_windows.h @@ -0,0 +1,51 @@ +#ifndef DIRENT_INCLUDED +#define DIRENT_INCLUDED + +/* + + Declaration of POSIX directory browsing functions and types for Win32. + + Author: Kevlin Henney (kevlin@acm.org, kevlin@curbralan.com) + History: Created March 1997. Updated June 2003. + Rights: See end of file. + +*/ + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct DIR DIR; + +struct dirent { + char *d_name; +}; + +DIR *opendir(const char *); +int closedir(DIR *); +struct dirent *readdir(DIR *); +void rewinddir(DIR *); + +/* + + Copyright Kevlin Henney, 1997, 2003. All rights reserved. + Copyright (c) 2015 The Khronos Group Inc. + Copyright (c) 2015 Valve Corporation + Copyright (c) 2015 LunarG, Inc. + + Permission to use, copy, modify, and distribute this software and its + documentation for any purpose is hereby granted without fee, provided + that this copyright and permissions notice appear in all copies and + derivatives. + + This software is supplied "as is" without express or implied warranty. 
+ + But that said, if there are any problems please get in touch. + +*/ + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/third_party/vulkan/loader/gpa_helper.h b/third_party/vulkan/loader/gpa_helper.h new file mode 100644 index 000000000..664d3dbc5 --- /dev/null +++ b/third_party/vulkan/loader/gpa_helper.h @@ -0,0 +1,379 @@ +/* + * + * Copyright (c) 2015 The Khronos Group Inc. + * Copyright (c) 2015 Valve Corporation + * Copyright (c) 2015 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Jon Ashburn + */ + +#include +#include "debug_report.h" +#include "wsi.h" + +static inline void *trampolineGetProcAddr(struct loader_instance *inst, + const char *funcName) { + // Don't include or check global functions + if (!strcmp(funcName, "vkGetInstanceProcAddr")) + return (PFN_vkVoidFunction)vkGetInstanceProcAddr; + if (!strcmp(funcName, "vkDestroyInstance")) + return (PFN_vkVoidFunction)vkDestroyInstance; + if (!strcmp(funcName, "vkEnumeratePhysicalDevices")) + return (PFN_vkVoidFunction)vkEnumeratePhysicalDevices; + if (!strcmp(funcName, "vkGetPhysicalDeviceFeatures")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceFeatures; + if (!strcmp(funcName, "vkGetPhysicalDeviceFormatProperties")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceFormatProperties; + if (!strcmp(funcName, "vkGetPhysicalDeviceImageFormatProperties")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceImageFormatProperties; + if (!strcmp(funcName, "vkGetPhysicalDeviceSparseImageFormatProperties")) + return ( + PFN_vkVoidFunction)vkGetPhysicalDeviceSparseImageFormatProperties; + if (!strcmp(funcName, "vkGetPhysicalDeviceProperties")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceProperties; + if (!strcmp(funcName, "vkGetPhysicalDeviceQueueFamilyProperties")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceQueueFamilyProperties; + if (!strcmp(funcName, "vkGetPhysicalDeviceMemoryProperties")) + return (PFN_vkVoidFunction)vkGetPhysicalDeviceMemoryProperties; + if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties")) + return (PFN_vkVoidFunction)vkEnumerateDeviceLayerProperties; + if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties")) + return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties; + if (!strcmp(funcName, "vkCreateDevice")) + return (PFN_vkVoidFunction)vkCreateDevice; + if (!strcmp(funcName, "vkGetDeviceProcAddr")) + return (PFN_vkVoidFunction)vkGetDeviceProcAddr; + if (!strcmp(funcName, "vkDestroyDevice")) + return 
(PFN_vkVoidFunction)vkDestroyDevice; + if (!strcmp(funcName, "vkGetDeviceQueue")) + return (PFN_vkVoidFunction)vkGetDeviceQueue; + if (!strcmp(funcName, "vkQueueSubmit")) + return (PFN_vkVoidFunction)vkQueueSubmit; + if (!strcmp(funcName, "vkQueueWaitIdle")) + return (PFN_vkVoidFunction)vkQueueWaitIdle; + if (!strcmp(funcName, "vkDeviceWaitIdle")) + return (PFN_vkVoidFunction)vkDeviceWaitIdle; + if (!strcmp(funcName, "vkAllocateMemory")) + return (PFN_vkVoidFunction)vkAllocateMemory; + if (!strcmp(funcName, "vkFreeMemory")) + return (PFN_vkVoidFunction)vkFreeMemory; + if (!strcmp(funcName, "vkMapMemory")) + return (PFN_vkVoidFunction)vkMapMemory; + if (!strcmp(funcName, "vkUnmapMemory")) + return (PFN_vkVoidFunction)vkUnmapMemory; + if (!strcmp(funcName, "vkFlushMappedMemoryRanges")) + return (PFN_vkVoidFunction)vkFlushMappedMemoryRanges; + if (!strcmp(funcName, "vkInvalidateMappedMemoryRanges")) + return (PFN_vkVoidFunction)vkInvalidateMappedMemoryRanges; + if (!strcmp(funcName, "vkGetDeviceMemoryCommitment")) + return (PFN_vkVoidFunction)vkGetDeviceMemoryCommitment; + if (!strcmp(funcName, "vkGetImageSparseMemoryRequirements")) + return (PFN_vkVoidFunction)vkGetImageSparseMemoryRequirements; + if (!strcmp(funcName, "vkGetImageMemoryRequirements")) + return (PFN_vkVoidFunction)vkGetImageMemoryRequirements; + if (!strcmp(funcName, "vkGetBufferMemoryRequirements")) + return (PFN_vkVoidFunction)vkGetBufferMemoryRequirements; + if (!strcmp(funcName, "vkBindImageMemory")) + return (PFN_vkVoidFunction)vkBindImageMemory; + if (!strcmp(funcName, "vkBindBufferMemory")) + return (PFN_vkVoidFunction)vkBindBufferMemory; + if (!strcmp(funcName, "vkQueueBindSparse")) + return (PFN_vkVoidFunction)vkQueueBindSparse; + if (!strcmp(funcName, "vkCreateFence")) + return (PFN_vkVoidFunction)vkCreateFence; + if (!strcmp(funcName, "vkDestroyFence")) + return (PFN_vkVoidFunction)vkDestroyFence; + if (!strcmp(funcName, "vkGetFenceStatus")) + return (PFN_vkVoidFunction)vkGetFenceStatus; + 
if (!strcmp(funcName, "vkResetFences")) + return (PFN_vkVoidFunction)vkResetFences; + if (!strcmp(funcName, "vkWaitForFences")) + return (PFN_vkVoidFunction)vkWaitForFences; + if (!strcmp(funcName, "vkCreateSemaphore")) + return (PFN_vkVoidFunction)vkCreateSemaphore; + if (!strcmp(funcName, "vkDestroySemaphore")) + return (PFN_vkVoidFunction)vkDestroySemaphore; + if (!strcmp(funcName, "vkCreateEvent")) + return (PFN_vkVoidFunction)vkCreateEvent; + if (!strcmp(funcName, "vkDestroyEvent")) + return (PFN_vkVoidFunction)vkDestroyEvent; + if (!strcmp(funcName, "vkGetEventStatus")) + return (PFN_vkVoidFunction)vkGetEventStatus; + if (!strcmp(funcName, "vkSetEvent")) + return (PFN_vkVoidFunction)vkSetEvent; + if (!strcmp(funcName, "vkResetEvent")) + return (PFN_vkVoidFunction)vkResetEvent; + if (!strcmp(funcName, "vkCreateQueryPool")) + return (PFN_vkVoidFunction)vkCreateQueryPool; + if (!strcmp(funcName, "vkDestroyQueryPool")) + return (PFN_vkVoidFunction)vkDestroyQueryPool; + if (!strcmp(funcName, "vkGetQueryPoolResults")) + return (PFN_vkVoidFunction)vkGetQueryPoolResults; + if (!strcmp(funcName, "vkCreateBuffer")) + return (PFN_vkVoidFunction)vkCreateBuffer; + if (!strcmp(funcName, "vkDestroyBuffer")) + return (PFN_vkVoidFunction)vkDestroyBuffer; + if (!strcmp(funcName, "vkCreateBufferView")) + return (PFN_vkVoidFunction)vkCreateBufferView; + if (!strcmp(funcName, "vkDestroyBufferView")) + return (PFN_vkVoidFunction)vkDestroyBufferView; + if (!strcmp(funcName, "vkCreateImage")) + return (PFN_vkVoidFunction)vkCreateImage; + if (!strcmp(funcName, "vkDestroyImage")) + return (PFN_vkVoidFunction)vkDestroyImage; + if (!strcmp(funcName, "vkGetImageSubresourceLayout")) + return (PFN_vkVoidFunction)vkGetImageSubresourceLayout; + if (!strcmp(funcName, "vkCreateImageView")) + return (PFN_vkVoidFunction)vkCreateImageView; + if (!strcmp(funcName, "vkDestroyImageView")) + return (PFN_vkVoidFunction)vkDestroyImageView; + if (!strcmp(funcName, "vkCreateShaderModule")) + return 
(PFN_vkVoidFunction)vkCreateShaderModule; + if (!strcmp(funcName, "vkDestroyShaderModule")) + return (PFN_vkVoidFunction)vkDestroyShaderModule; + if (!strcmp(funcName, "vkCreatePipelineCache")) + return (PFN_vkVoidFunction)vkCreatePipelineCache; + if (!strcmp(funcName, "vkDestroyPipelineCache")) + return (PFN_vkVoidFunction)vkDestroyPipelineCache; + if (!strcmp(funcName, "vkGetPipelineCacheData")) + return (PFN_vkVoidFunction)vkGetPipelineCacheData; + if (!strcmp(funcName, "vkMergePipelineCaches")) + return (PFN_vkVoidFunction)vkMergePipelineCaches; + if (!strcmp(funcName, "vkCreateGraphicsPipelines")) + return (PFN_vkVoidFunction)vkCreateGraphicsPipelines; + if (!strcmp(funcName, "vkCreateComputePipelines")) + return (PFN_vkVoidFunction)vkCreateComputePipelines; + if (!strcmp(funcName, "vkDestroyPipeline")) + return (PFN_vkVoidFunction)vkDestroyPipeline; + if (!strcmp(funcName, "vkCreatePipelineLayout")) + return (PFN_vkVoidFunction)vkCreatePipelineLayout; + if (!strcmp(funcName, "vkDestroyPipelineLayout")) + return (PFN_vkVoidFunction)vkDestroyPipelineLayout; + if (!strcmp(funcName, "vkCreateSampler")) + return (PFN_vkVoidFunction)vkCreateSampler; + if (!strcmp(funcName, "vkDestroySampler")) + return (PFN_vkVoidFunction)vkDestroySampler; + if (!strcmp(funcName, "vkCreateDescriptorSetLayout")) + return (PFN_vkVoidFunction)vkCreateDescriptorSetLayout; + if (!strcmp(funcName, "vkDestroyDescriptorSetLayout")) + return (PFN_vkVoidFunction)vkDestroyDescriptorSetLayout; + if (!strcmp(funcName, "vkCreateDescriptorPool")) + return (PFN_vkVoidFunction)vkCreateDescriptorPool; + if (!strcmp(funcName, "vkDestroyDescriptorPool")) + return (PFN_vkVoidFunction)vkDestroyDescriptorPool; + if (!strcmp(funcName, "vkResetDescriptorPool")) + return (PFN_vkVoidFunction)vkResetDescriptorPool; + if (!strcmp(funcName, "vkAllocateDescriptorSets")) + return (PFN_vkVoidFunction)vkAllocateDescriptorSets; + if (!strcmp(funcName, "vkFreeDescriptorSets")) + return 
(PFN_vkVoidFunction)vkFreeDescriptorSets; + if (!strcmp(funcName, "vkUpdateDescriptorSets")) + return (PFN_vkVoidFunction)vkUpdateDescriptorSets; + if (!strcmp(funcName, "vkCreateFramebuffer")) + return (PFN_vkVoidFunction)vkCreateFramebuffer; + if (!strcmp(funcName, "vkDestroyFramebuffer")) + return (PFN_vkVoidFunction)vkDestroyFramebuffer; + if (!strcmp(funcName, "vkCreateRenderPass")) + return (PFN_vkVoidFunction)vkCreateRenderPass; + if (!strcmp(funcName, "vkDestroyRenderPass")) + return (PFN_vkVoidFunction)vkDestroyRenderPass; + if (!strcmp(funcName, "vkGetRenderAreaGranularity")) + return (PFN_vkVoidFunction)vkGetRenderAreaGranularity; + if (!strcmp(funcName, "vkCreateCommandPool")) + return (PFN_vkVoidFunction)vkCreateCommandPool; + if (!strcmp(funcName, "vkDestroyCommandPool")) + return (PFN_vkVoidFunction)vkDestroyCommandPool; + if (!strcmp(funcName, "vkResetCommandPool")) + return (PFN_vkVoidFunction)vkResetCommandPool; + if (!strcmp(funcName, "vkAllocateCommandBuffers")) + return (PFN_vkVoidFunction)vkAllocateCommandBuffers; + if (!strcmp(funcName, "vkFreeCommandBuffers")) + return (PFN_vkVoidFunction)vkFreeCommandBuffers; + if (!strcmp(funcName, "vkBeginCommandBuffer")) + return (PFN_vkVoidFunction)vkBeginCommandBuffer; + if (!strcmp(funcName, "vkEndCommandBuffer")) + return (PFN_vkVoidFunction)vkEndCommandBuffer; + if (!strcmp(funcName, "vkResetCommandBuffer")) + return (PFN_vkVoidFunction)vkResetCommandBuffer; + if (!strcmp(funcName, "vkCmdBindPipeline")) + return (PFN_vkVoidFunction)vkCmdBindPipeline; + if (!strcmp(funcName, "vkCmdBindDescriptorSets")) + return (PFN_vkVoidFunction)vkCmdBindDescriptorSets; + if (!strcmp(funcName, "vkCmdBindVertexBuffers")) + return (PFN_vkVoidFunction)vkCmdBindVertexBuffers; + if (!strcmp(funcName, "vkCmdBindIndexBuffer")) + return (PFN_vkVoidFunction)vkCmdBindIndexBuffer; + if (!strcmp(funcName, "vkCmdSetViewport")) + return (PFN_vkVoidFunction)vkCmdSetViewport; + if (!strcmp(funcName, "vkCmdSetScissor")) + return 
(PFN_vkVoidFunction)vkCmdSetScissor; + if (!strcmp(funcName, "vkCmdSetLineWidth")) + return (PFN_vkVoidFunction)vkCmdSetLineWidth; + if (!strcmp(funcName, "vkCmdSetDepthBias")) + return (PFN_vkVoidFunction)vkCmdSetDepthBias; + if (!strcmp(funcName, "vkCmdSetBlendConstants")) + return (PFN_vkVoidFunction)vkCmdSetBlendConstants; + if (!strcmp(funcName, "vkCmdSetDepthBounds")) + return (PFN_vkVoidFunction)vkCmdSetDepthBounds; + if (!strcmp(funcName, "vkCmdSetStencilCompareMask")) + return (PFN_vkVoidFunction)vkCmdSetStencilCompareMask; + if (!strcmp(funcName, "vkCmdSetStencilWriteMask")) + return (PFN_vkVoidFunction)vkCmdSetStencilWriteMask; + if (!strcmp(funcName, "vkCmdSetStencilReference")) + return (PFN_vkVoidFunction)vkCmdSetStencilReference; + if (!strcmp(funcName, "vkCmdDraw")) + return (PFN_vkVoidFunction)vkCmdDraw; + if (!strcmp(funcName, "vkCmdDrawIndexed")) + return (PFN_vkVoidFunction)vkCmdDrawIndexed; + if (!strcmp(funcName, "vkCmdDrawIndirect")) + return (PFN_vkVoidFunction)vkCmdDrawIndirect; + if (!strcmp(funcName, "vkCmdDrawIndexedIndirect")) + return (PFN_vkVoidFunction)vkCmdDrawIndexedIndirect; + if (!strcmp(funcName, "vkCmdDispatch")) + return (PFN_vkVoidFunction)vkCmdDispatch; + if (!strcmp(funcName, "vkCmdDispatchIndirect")) + return (PFN_vkVoidFunction)vkCmdDispatchIndirect; + if (!strcmp(funcName, "vkCmdCopyBuffer")) + return (PFN_vkVoidFunction)vkCmdCopyBuffer; + if (!strcmp(funcName, "vkCmdCopyImage")) + return (PFN_vkVoidFunction)vkCmdCopyImage; + if (!strcmp(funcName, "vkCmdBlitImage")) + return (PFN_vkVoidFunction)vkCmdBlitImage; + if (!strcmp(funcName, "vkCmdCopyBufferToImage")) + return (PFN_vkVoidFunction)vkCmdCopyBufferToImage; + if (!strcmp(funcName, "vkCmdCopyImageToBuffer")) + return (PFN_vkVoidFunction)vkCmdCopyImageToBuffer; + if (!strcmp(funcName, "vkCmdUpdateBuffer")) + return (PFN_vkVoidFunction)vkCmdUpdateBuffer; + if (!strcmp(funcName, "vkCmdFillBuffer")) + return (PFN_vkVoidFunction)vkCmdFillBuffer; + if (!strcmp(funcName, 
"vkCmdClearColorImage")) + return (PFN_vkVoidFunction)vkCmdClearColorImage; + if (!strcmp(funcName, "vkCmdClearDepthStencilImage")) + return (PFN_vkVoidFunction)vkCmdClearDepthStencilImage; + if (!strcmp(funcName, "vkCmdClearAttachments")) + return (PFN_vkVoidFunction)vkCmdClearAttachments; + if (!strcmp(funcName, "vkCmdResolveImage")) + return (PFN_vkVoidFunction)vkCmdResolveImage; + if (!strcmp(funcName, "vkCmdSetEvent")) + return (PFN_vkVoidFunction)vkCmdSetEvent; + if (!strcmp(funcName, "vkCmdResetEvent")) + return (PFN_vkVoidFunction)vkCmdResetEvent; + if (!strcmp(funcName, "vkCmdWaitEvents")) + return (PFN_vkVoidFunction)vkCmdWaitEvents; + if (!strcmp(funcName, "vkCmdPipelineBarrier")) + return (PFN_vkVoidFunction)vkCmdPipelineBarrier; + if (!strcmp(funcName, "vkCmdBeginQuery")) + return (PFN_vkVoidFunction)vkCmdBeginQuery; + if (!strcmp(funcName, "vkCmdEndQuery")) + return (PFN_vkVoidFunction)vkCmdEndQuery; + if (!strcmp(funcName, "vkCmdResetQueryPool")) + return (PFN_vkVoidFunction)vkCmdResetQueryPool; + if (!strcmp(funcName, "vkCmdWriteTimestamp")) + return (PFN_vkVoidFunction)vkCmdWriteTimestamp; + if (!strcmp(funcName, "vkCmdCopyQueryPoolResults")) + return (PFN_vkVoidFunction)vkCmdCopyQueryPoolResults; + if (!strcmp(funcName, "vkCmdPushConstants")) + return (PFN_vkVoidFunction)vkCmdPushConstants; + if (!strcmp(funcName, "vkCmdBeginRenderPass")) + return (PFN_vkVoidFunction)vkCmdBeginRenderPass; + if (!strcmp(funcName, "vkCmdNextSubpass")) + return (PFN_vkVoidFunction)vkCmdNextSubpass; + if (!strcmp(funcName, "vkCmdEndRenderPass")) + return (PFN_vkVoidFunction)vkCmdEndRenderPass; + if (!strcmp(funcName, "vkCmdExecuteCommands")) + return (PFN_vkVoidFunction)vkCmdExecuteCommands; + + // Instance extensions + void *addr; + if (debug_report_instance_gpa(inst, funcName, &addr)) + return addr; + + if (wsi_swapchain_instance_gpa(inst, funcName, &addr)) + return addr; + + addr = loader_dev_ext_gpa(inst, funcName); + return addr; +} + +static inline void 
*globalGetProcAddr(const char *name) { + if (!name || name[0] != 'v' || name[1] != 'k') + return NULL; + + name += 2; + if (!strcmp(name, "CreateInstance")) + return (void *)vkCreateInstance; + if (!strcmp(name, "EnumerateInstanceExtensionProperties")) + return (void *)vkEnumerateInstanceExtensionProperties; + if (!strcmp(name, "EnumerateInstanceLayerProperties")) + return (void *)vkEnumerateInstanceLayerProperties; + + return NULL; +} + +/* These functions require special handling by the loader. +* They are not just generic trampoline code entrypoints. +* Thus GPA must return loader entrypoint for these instead of first function +* in the chain. */ +static inline void *loader_non_passthrough_gipa(const char *name) { + if (!name || name[0] != 'v' || name[1] != 'k') + return NULL; + + name += 2; + if (!strcmp(name, "CreateInstance")) + return (void *)vkCreateInstance; + if (!strcmp(name, "DestroyInstance")) + return (void *)vkDestroyInstance; + if (!strcmp(name, "GetDeviceProcAddr")) + return (void *)vkGetDeviceProcAddr; + // remove once no longer locks + if (!strcmp(name, "EnumeratePhysicalDevices")) + return (void *)vkEnumeratePhysicalDevices; + if (!strcmp(name, "EnumerateDeviceExtensionProperties")) + return (void *)vkEnumerateDeviceExtensionProperties; + if (!strcmp(name, "EnumerateDeviceLayerProperties")) + return (void *)vkEnumerateDeviceLayerProperties; + if (!strcmp(name, "GetInstanceProcAddr")) + return (void *)vkGetInstanceProcAddr; + if (!strcmp(name, "CreateDevice")) + return (void *)vkCreateDevice; + + return NULL; +} + +static inline void *loader_non_passthrough_gdpa(const char *name) { + if (!name || name[0] != 'v' || name[1] != 'k') + return NULL; + + name += 2; + + if (!strcmp(name, "GetDeviceProcAddr")) + return (void *)vkGetDeviceProcAddr; + if (!strcmp(name, "DestroyDevice")) + return (void *)vkDestroyDevice; + if (!strcmp(name, "GetDeviceQueue")) + return (void *)vkGetDeviceQueue; + if (!strcmp(name, "AllocateCommandBuffers")) + return (void 
*)vkAllocateCommandBuffers; + + return NULL; +} diff --git a/third_party/vulkan/loader/loader.c b/third_party/vulkan/loader/loader.c new file mode 100644 index 000000000..5de0ffba3 --- /dev/null +++ b/third_party/vulkan/loader/loader.c @@ -0,0 +1,4504 @@ +/* + * + * Copyright (c) 2014-2016 The Khronos Group Inc. + * Copyright (c) 2014-2016 Valve Corporation + * Copyright (c) 2014-2016 LunarG, Inc. + * Copyright (C) 2015 Google Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Jon Ashburn + * Author: Courtney Goeltzenleuchter + * + */ + +#define _GNU_SOURCE +#include +#include +#include +#include +#include + +#include +#if defined(_WIN32) +#include "dirent_on_windows.h" +#else // _WIN32 +#include +#endif // _WIN32 +#include "vk_loader_platform.h" +#include "loader.h" +#include "gpa_helper.h" +#include "table_ops.h" +#include "debug_report.h" +#include "wsi.h" +#include "vulkan/vk_icd.h" +#include "cJSON.h" +#include "murmurhash.h" + +static loader_platform_dl_handle +loader_add_layer_lib(const struct loader_instance *inst, const char *chain_type, + struct loader_layer_properties *layer_prop); + +static void loader_remove_layer_lib(struct loader_instance *inst, + struct loader_layer_properties *layer_prop); + +struct loader_struct loader = {0}; +// TLS for instance for alloc/free callbacks +THREAD_LOCAL_DECL struct loader_instance *tls_instance; + +static bool loader_init_generic_list(const struct loader_instance *inst, + struct loader_generic_list *list_info, + size_t element_size); + +static size_t loader_platform_combine_path(char *dest, size_t len, ...); + +struct loader_phys_dev_per_icd { + uint32_t count; + VkPhysicalDevice *phys_devs; +}; + +enum loader_debug { + LOADER_INFO_BIT = 0x01, + LOADER_WARN_BIT = 0x02, + LOADER_PERF_BIT = 0x04, + LOADER_ERROR_BIT = 0x08, + LOADER_DEBUG_BIT = 0x10, +}; + +uint32_t g_loader_debug = 0; +uint32_t g_loader_log_msgs = 0; + +// thread safety lock for accessing global data structures such as "loader" +// all entrypoints on the instance chain need to be locked except GPA +// additionally CreateDevice and DestroyDevice needs to be locked +loader_platform_thread_mutex loader_lock; +loader_platform_thread_mutex loader_json_lock; + +const char *std_validation_str = "VK_LAYER_LUNARG_standard_validation"; + +// This table contains the loader's instance dispatch table, which contains +// default functions if no instance layers are activated. 
This contains +// pointers to "terminator functions". +const VkLayerInstanceDispatchTable instance_disp = { + .GetInstanceProcAddr = vkGetInstanceProcAddr, + .DestroyInstance = loader_DestroyInstance, + .EnumeratePhysicalDevices = loader_EnumeratePhysicalDevices, + .GetPhysicalDeviceFeatures = loader_GetPhysicalDeviceFeatures, + .GetPhysicalDeviceFormatProperties = + loader_GetPhysicalDeviceFormatProperties, + .GetPhysicalDeviceImageFormatProperties = + loader_GetPhysicalDeviceImageFormatProperties, + .GetPhysicalDeviceProperties = loader_GetPhysicalDeviceProperties, + .GetPhysicalDeviceQueueFamilyProperties = + loader_GetPhysicalDeviceQueueFamilyProperties, + .GetPhysicalDeviceMemoryProperties = + loader_GetPhysicalDeviceMemoryProperties, + .EnumerateDeviceExtensionProperties = + loader_EnumerateDeviceExtensionProperties, + .EnumerateDeviceLayerProperties = loader_EnumerateDeviceLayerProperties, + .GetPhysicalDeviceSparseImageFormatProperties = + loader_GetPhysicalDeviceSparseImageFormatProperties, + .DestroySurfaceKHR = loader_DestroySurfaceKHR, + .GetPhysicalDeviceSurfaceSupportKHR = + loader_GetPhysicalDeviceSurfaceSupportKHR, + .GetPhysicalDeviceSurfaceCapabilitiesKHR = + loader_GetPhysicalDeviceSurfaceCapabilitiesKHR, + .GetPhysicalDeviceSurfaceFormatsKHR = + loader_GetPhysicalDeviceSurfaceFormatsKHR, + .GetPhysicalDeviceSurfacePresentModesKHR = + loader_GetPhysicalDeviceSurfacePresentModesKHR, + .CreateDebugReportCallbackEXT = loader_CreateDebugReportCallback, + .DestroyDebugReportCallbackEXT = loader_DestroyDebugReportCallback, + .DebugReportMessageEXT = loader_DebugReportMessage, +#ifdef VK_USE_PLATFORM_MIR_KHR + .CreateMirSurfaceKHR = loader_CreateMirSurfaceKHR, + .GetPhysicalDeviceMirPresentationSupportKHR = + loader_GetPhysicalDeviceMirPresentationSupportKHR, +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + .CreateWaylandSurfaceKHR = loader_CreateWaylandSurfaceKHR, + .GetPhysicalDeviceWaylandPresentationSupportKHR = + 
loader_GetPhysicalDeviceWaylandPresentationSupportKHR, +#endif +#ifdef VK_USE_PLATFORM_WIN32_KHR + .CreateWin32SurfaceKHR = loader_CreateWin32SurfaceKHR, + .GetPhysicalDeviceWin32PresentationSupportKHR = + loader_GetPhysicalDeviceWin32PresentationSupportKHR, +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + .CreateXcbSurfaceKHR = loader_CreateXcbSurfaceKHR, + .GetPhysicalDeviceXcbPresentationSupportKHR = + loader_GetPhysicalDeviceXcbPresentationSupportKHR, +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + .CreateXlibSurfaceKHR = loader_CreateXlibSurfaceKHR, + .GetPhysicalDeviceXlibPresentationSupportKHR = + loader_GetPhysicalDeviceXlibPresentationSupportKHR, +#endif +#ifdef VK_USE_PLATFORM_ANDROID_KHR + .CreateAndroidSurfaceKHR = loader_CreateAndroidSurfaceKHR, +#endif +}; + +LOADER_PLATFORM_THREAD_ONCE_DECLARATION(once_init); + +void *loader_heap_alloc(const struct loader_instance *instance, size_t size, + VkSystemAllocationScope alloc_scope) { + if (instance && instance->alloc_callbacks.pfnAllocation) { + /* TODO: What should default alignment be? 1, 4, 8, other? 
*/ + return instance->alloc_callbacks.pfnAllocation( + instance->alloc_callbacks.pUserData, size, sizeof(int), + alloc_scope); + } + return malloc(size); +} + +void loader_heap_free(const struct loader_instance *instance, void *pMemory) { + if (pMemory == NULL) + return; + if (instance && instance->alloc_callbacks.pfnFree) { + instance->alloc_callbacks.pfnFree(instance->alloc_callbacks.pUserData, + pMemory); + return; + } + free(pMemory); +} + +void *loader_heap_realloc(const struct loader_instance *instance, void *pMemory, + size_t orig_size, size_t size, + VkSystemAllocationScope alloc_scope) { + if (pMemory == NULL || orig_size == 0) + return loader_heap_alloc(instance, size, alloc_scope); + if (size == 0) { + loader_heap_free(instance, pMemory); + return NULL; + } + // TODO use the callback realloc function + if (instance && instance->alloc_callbacks.pfnAllocation) { + if (size <= orig_size) { + memset(((uint8_t *)pMemory) + size, 0, orig_size - size); + return pMemory; + } + /* TODO: What should default alignment be? 1, 4, 8, other? */ + void *new_ptr = instance->alloc_callbacks.pfnAllocation( + instance->alloc_callbacks.pUserData, size, sizeof(int), + alloc_scope); + if (!new_ptr) + return NULL; + memcpy(new_ptr, pMemory, orig_size); + instance->alloc_callbacks.pfnFree(instance->alloc_callbacks.pUserData, + pMemory); + return new_ptr; + } + return realloc(pMemory, size); +} + +void *loader_tls_heap_alloc(size_t size) { + return loader_heap_alloc(tls_instance, size, + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); +} + +void loader_tls_heap_free(void *pMemory) { + loader_heap_free(tls_instance, pMemory); +} + +void loader_log(const struct loader_instance *inst, VkFlags msg_type, + int32_t msg_code, const char *format, ...) 
{ + char msg[512]; + va_list ap; + int ret; + + va_start(ap, format); + ret = vsnprintf(msg, sizeof(msg), format, ap); + if ((ret >= (int)sizeof(msg)) || ret < 0) { + msg[sizeof(msg) - 1] = '\0'; + } + va_end(ap); + + if (inst) { + util_DebugReportMessage(inst, msg_type, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + (uint64_t)inst, 0, msg_code, "loader", msg); + } + + if (!(msg_type & g_loader_log_msgs)) { + return; + } + +#if defined(WIN32) + OutputDebugString(msg); + OutputDebugString("\n"); +#endif + fputs(msg, stderr); + fputc('\n', stderr); +} + +#if defined(WIN32) +static char *loader_get_next_path(char *path); +/** +* Find the list of registry files (names within a key) in key "location". +* +* This function looks in the registry (hive = DEFAULT_VK_REGISTRY_HIVE) key as +*given in "location" +* for a list or name/values which are added to a returned list (function return +*value). +* The DWORD values within the key must be 0 or they are skipped. +* Function return is a string with a ';' separated list of filenames. +* Function return is NULL if no valid name/value pairs are found in the key, +* or the key is not found. +* +* \returns +* A string list of filenames as pointer. +* When done using the returned string list, pointer should be freed. 
+*/ +static char *loader_get_registry_files(const struct loader_instance *inst, + char *location) { + LONG rtn_value; + HKEY hive, key; + DWORD access_flags; + char name[2048]; + char *out = NULL; + char *loc = location; + char *next; + DWORD idx = 0; + DWORD name_size = sizeof(name); + DWORD value; + DWORD total_size = 4096; + DWORD value_size = sizeof(value); + + while (*loc) { + next = loader_get_next_path(loc); + hive = DEFAULT_VK_REGISTRY_HIVE; + access_flags = KEY_QUERY_VALUE; + rtn_value = RegOpenKeyEx(hive, loc, 0, access_flags, &key); + if (rtn_value != ERROR_SUCCESS) { + // We still couldn't find the key, so give up: + loc = next; + continue; + } + + while ((rtn_value = RegEnumValue(key, idx++, name, &name_size, NULL, + NULL, (LPBYTE)&value, &value_size)) == + ERROR_SUCCESS) { + if (value_size == sizeof(value) && value == 0) { + if (out == NULL) { + out = loader_heap_alloc( + inst, total_size, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + out[0] = '\0'; + } else if (strlen(out) + name_size + 1 > total_size) { + out = loader_heap_realloc( + inst, out, total_size, total_size * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + total_size *= 2; + } + if (out == NULL) { + loader_log( + inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Out of memory, failed loader_get_registry_files"); + return NULL; + } + if (strlen(out) == 0) + snprintf(out, name_size + 1, "%s", name); + else + snprintf(out + strlen(out), name_size + 2, "%c%s", + PATH_SEPERATOR, name); + } + name_size = 2048; + } + loc = next; + } + + return out; +} + +#endif // WIN32 + +/** + * Combine path elements, separating each element with the platform-specific + * directory separator, and save the combined string to a destination buffer, + * not exceeding the given length. Path elements are given as variadic args, + * with a NULL element terminating the list. + * + * \returns the total length of the combined string, not including an ASCII + * NUL termination character. 
This length may exceed the available storage: + * in this case, the written string will be truncated to avoid a buffer + * overrun, and the return value will greater than or equal to the storage + * size. A NULL argument may be provided as the destination buffer in order + * to determine the required string length without actually writing a string. + */ + +static size_t loader_platform_combine_path(char *dest, size_t len, ...) { + size_t required_len = 0; + va_list ap; + const char *component; + + va_start(ap, len); + + while ((component = va_arg(ap, const char *))) { + if (required_len > 0) { + // This path element is not the first non-empty element; prepend + // a directory separator if space allows + if (dest && required_len + 1 < len) { + snprintf(dest + required_len, len - required_len, "%c", + DIRECTORY_SYMBOL); + } + required_len++; + } + + if (dest && required_len < len) { + strncpy(dest + required_len, component, len - required_len); + } + required_len += strlen(component); + } + + va_end(ap); + + // strncpy(3) won't add a NUL terminating byte in the event of truncation. + if (dest && required_len >= len) { + dest[len - 1] = '\0'; + } + + return required_len; +} + +/** + * Given string of three part form "maj.min.pat" convert to a vulkan version + * number. 
+ */ +static uint32_t loader_make_version(const char *vers_str) { + uint32_t vers = 0, major = 0, minor = 0, patch = 0; + char *minor_str = NULL; + char *patch_str = NULL; + char *cstr; + char *str; + + if (!vers_str) + return vers; + cstr = loader_stack_alloc(strlen(vers_str) + 1); + strcpy(cstr, vers_str); + while ((str = strchr(cstr, '.')) != NULL) { + if (minor_str == NULL) { + minor_str = str + 1; + *str = '\0'; + major = atoi(cstr); + } else if (patch_str == NULL) { + patch_str = str + 1; + *str = '\0'; + minor = atoi(minor_str); + } else { + return vers; + } + cstr = str + 1; + } + patch = atoi(patch_str); + + return VK_MAKE_VERSION(major, minor, patch); +} + +bool compare_vk_extension_properties(const VkExtensionProperties *op1, + const VkExtensionProperties *op2) { + return strcmp(op1->extensionName, op2->extensionName) == 0 ? true : false; +} + +/** + * Search the given ext_array for an extension + * matching the given vk_ext_prop + */ +bool has_vk_extension_property_array(const VkExtensionProperties *vk_ext_prop, + const uint32_t count, + const VkExtensionProperties *ext_array) { + for (uint32_t i = 0; i < count; i++) { + if (compare_vk_extension_properties(vk_ext_prop, &ext_array[i])) + return true; + } + return false; +} + +/** + * Search the given ext_list for an extension + * matching the given vk_ext_prop + */ +bool has_vk_extension_property(const VkExtensionProperties *vk_ext_prop, + const struct loader_extension_list *ext_list) { + for (uint32_t i = 0; i < ext_list->count; i++) { + if (compare_vk_extension_properties(&ext_list->list[i], vk_ext_prop)) + return true; + } + return false; +} + +static inline bool loader_is_layer_type_device(const enum layer_type type) { + if ((type & VK_LAYER_TYPE_DEVICE_EXPLICIT) || + (type & VK_LAYER_TYPE_DEVICE_IMPLICIT)) + return true; + return false; +} + +/* + * Search the given layer list for a layer matching the given layer name + */ +static struct loader_layer_properties * +loader_get_layer_property(const 
char *name, + const struct loader_layer_list *layer_list) { + for (uint32_t i = 0; i < layer_list->count; i++) { + const VkLayerProperties *item = &layer_list->list[i].info; + if (strcmp(name, item->layerName) == 0) + return &layer_list->list[i]; + } + return NULL; +} + +/** + * Get the next unused layer property in the list. Init the property to zero. + */ +static struct loader_layer_properties * +loader_get_next_layer_property(const struct loader_instance *inst, + struct loader_layer_list *layer_list) { + if (layer_list->capacity == 0) { + layer_list->list = + loader_heap_alloc(inst, sizeof(struct loader_layer_properties) * 64, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (layer_list->list == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Out of memory can't add any layer properties to list"); + return NULL; + } + memset(layer_list->list, 0, + sizeof(struct loader_layer_properties) * 64); + layer_list->capacity = sizeof(struct loader_layer_properties) * 64; + } + + // ensure enough room to add an entry + if ((layer_list->count + 1) * sizeof(struct loader_layer_properties) > + layer_list->capacity) { + layer_list->list = loader_heap_realloc( + inst, layer_list->list, layer_list->capacity, + layer_list->capacity * 2, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (layer_list->list == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "realloc failed for layer list"); + } + layer_list->capacity *= 2; + } + + layer_list->count++; + return &(layer_list->list[layer_list->count - 1]); +} + +/** + * Remove all layer properties entrys from the list + */ +void loader_delete_layer_properties(const struct loader_instance *inst, + struct loader_layer_list *layer_list) { + uint32_t i, j; + struct loader_device_extension_list *dev_ext_list; + if (!layer_list) + return; + + for (i = 0; i < layer_list->count; i++) { + loader_destroy_generic_list( + inst, (struct loader_generic_list *)&layer_list->list[i] + .instance_extension_list); + dev_ext_list = 
&layer_list->list[i].device_extension_list; + if (dev_ext_list->capacity > 0 && + dev_ext_list->list->entrypoint_count > 0) { + for (j = 0; j < dev_ext_list->list->entrypoint_count; j++) { + loader_heap_free(inst, dev_ext_list->list->entrypoints[j]); + } + loader_heap_free(inst, dev_ext_list->list->entrypoints); + } + loader_destroy_generic_list(inst, + (struct loader_generic_list *)dev_ext_list); + } + layer_list->count = 0; + + if (layer_list->capacity > 0) { + layer_list->capacity = 0; + loader_heap_free(inst, layer_list->list); + } +} + +static void loader_add_instance_extensions( + const struct loader_instance *inst, + const PFN_vkEnumerateInstanceExtensionProperties fp_get_props, + const char *lib_name, struct loader_extension_list *ext_list) { + uint32_t i, count = 0; + VkExtensionProperties *ext_props; + VkResult res; + + if (!fp_get_props) { + /* No EnumerateInstanceExtensionProperties defined */ + return; + } + + res = fp_get_props(NULL, &count, NULL); + if (res != VK_SUCCESS) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Error getting Instance extension count from %s", lib_name); + return; + } + + if (count == 0) { + /* No ExtensionProperties to report */ + return; + } + + ext_props = loader_stack_alloc(count * sizeof(VkExtensionProperties)); + + res = fp_get_props(NULL, &count, ext_props); + if (res != VK_SUCCESS) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Error getting Instance extensions from %s", lib_name); + return; + } + + for (i = 0; i < count; i++) { + char spec_version[64]; + + snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", + VK_MAJOR(ext_props[i].specVersion), + VK_MINOR(ext_props[i].specVersion), + VK_PATCH(ext_props[i].specVersion)); + loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, + "Instance Extension: %s (%s) version %s", + ext_props[i].extensionName, lib_name, spec_version); + loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]); + } + + return; +} + +/* + * Initialize ext_list with the 
physical device extensions. + * The extension properties are passed as inputs in count and ext_props. + */ +static VkResult +loader_init_device_extensions(const struct loader_instance *inst, + struct loader_physical_device *phys_dev, + uint32_t count, VkExtensionProperties *ext_props, + struct loader_extension_list *ext_list) { + VkResult res; + uint32_t i; + + if (!loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, + sizeof(VkExtensionProperties))) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + for (i = 0; i < count; i++) { + char spec_version[64]; + + snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", + VK_MAJOR(ext_props[i].specVersion), + VK_MINOR(ext_props[i].specVersion), + VK_PATCH(ext_props[i].specVersion)); + loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, + "Device Extension: %s (%s) version %s", + ext_props[i].extensionName, + phys_dev->this_icd->this_icd_lib->lib_name, spec_version); + res = loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]); + if (res != VK_SUCCESS) + return res; + } + + return VK_SUCCESS; +} + +static VkResult loader_add_device_extensions( + const struct loader_instance *inst, struct loader_icd *icd, + VkPhysicalDevice physical_device, const char *lib_name, + struct loader_extension_list *ext_list) { + uint32_t i, count; + VkResult res; + VkExtensionProperties *ext_props; + + res = icd->EnumerateDeviceExtensionProperties(physical_device, NULL, &count, + NULL); + if (res == VK_SUCCESS && count > 0) { + ext_props = loader_stack_alloc(count * sizeof(VkExtensionProperties)); + if (!ext_props) + return VK_ERROR_OUT_OF_HOST_MEMORY; + res = icd->EnumerateDeviceExtensionProperties(physical_device, NULL, + &count, ext_props); + if (res != VK_SUCCESS) + return res; + for (i = 0; i < count; i++) { + char spec_version[64]; + + snprintf(spec_version, sizeof(spec_version), "%d.%d.%d", + VK_MAJOR(ext_props[i].specVersion), + VK_MINOR(ext_props[i].specVersion), + VK_PATCH(ext_props[i].specVersion)); + 
loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, + "Device Extension: %s (%s) version %s", + ext_props[i].extensionName, lib_name, spec_version); + res = loader_add_to_ext_list(inst, ext_list, 1, &ext_props[i]); + if (res != VK_SUCCESS) + return res; + } + } else { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Error getting physical device extension info count from " + "library %s", + lib_name); + return res; + } + + return VK_SUCCESS; +} + +static bool loader_init_generic_list(const struct loader_instance *inst, + struct loader_generic_list *list_info, + size_t element_size) { + list_info->capacity = 32 * element_size; + list_info->list = loader_heap_alloc(inst, list_info->capacity, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (list_info->list == NULL) { + return false; + } + memset(list_info->list, 0, list_info->capacity); + list_info->count = 0; + return true; +} + +void loader_destroy_generic_list(const struct loader_instance *inst, + struct loader_generic_list *list) { + loader_heap_free(inst, list->list); + list->count = 0; + list->capacity = 0; +} + +/* + * Append non-duplicate extension properties defined in props + * to the given ext_list. 
+ * Return + * Vk_SUCCESS on success + */ +VkResult loader_add_to_ext_list(const struct loader_instance *inst, + struct loader_extension_list *ext_list, + uint32_t prop_list_count, + const VkExtensionProperties *props) { + uint32_t i; + const VkExtensionProperties *cur_ext; + + if (ext_list->list == NULL || ext_list->capacity == 0) { + loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, + sizeof(VkExtensionProperties)); + } + + if (ext_list->list == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + for (i = 0; i < prop_list_count; i++) { + cur_ext = &props[i]; + + // look for duplicates + if (has_vk_extension_property(cur_ext, ext_list)) { + continue; + } + + // add to list at end + // check for enough capacity + if (ext_list->count * sizeof(VkExtensionProperties) >= + ext_list->capacity) { + + ext_list->list = loader_heap_realloc( + inst, ext_list->list, ext_list->capacity, + ext_list->capacity * 2, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + + if (ext_list->list == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + // double capacity + ext_list->capacity *= 2; + } + + memcpy(&ext_list->list[ext_list->count], cur_ext, + sizeof(VkExtensionProperties)); + ext_list->count++; + } + return VK_SUCCESS; +} + +/* + * Append one extension property defined in props with entrypoints + * defined in entrys to the given ext_list. 
+ * Return + * Vk_SUCCESS on success + */ +VkResult +loader_add_to_dev_ext_list(const struct loader_instance *inst, + struct loader_device_extension_list *ext_list, + const VkExtensionProperties *props, + uint32_t entry_count, char **entrys) { + uint32_t idx; + if (ext_list->list == NULL || ext_list->capacity == 0) { + loader_init_generic_list(inst, (struct loader_generic_list *)ext_list, + sizeof(struct loader_dev_ext_props)); + } + + if (ext_list->list == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + idx = ext_list->count; + // add to list at end + // check for enough capacity + if (idx * sizeof(struct loader_dev_ext_props) >= ext_list->capacity) { + + ext_list->list = loader_heap_realloc( + inst, ext_list->list, ext_list->capacity, ext_list->capacity * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + + if (ext_list->list == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + // double capacity + ext_list->capacity *= 2; + } + + memcpy(&ext_list->list[idx].props, props, + sizeof(struct loader_dev_ext_props)); + ext_list->list[idx].entrypoint_count = entry_count; + ext_list->list[idx].entrypoints = + loader_heap_alloc(inst, sizeof(char *) * entry_count, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (ext_list->list[idx].entrypoints == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + for (uint32_t i = 0; i < entry_count; i++) { + ext_list->list[idx].entrypoints[i] = loader_heap_alloc( + inst, strlen(entrys[i]) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (ext_list->list[idx].entrypoints[i] == NULL) + return VK_ERROR_OUT_OF_HOST_MEMORY; + strcpy(ext_list->list[idx].entrypoints[i], entrys[i]); + } + ext_list->count++; + + return VK_SUCCESS; +} + +/** + * Search the given search_list for any layers in the props list. + * Add these to the output layer_list. Don't add duplicates to the output + * layer_list. 
+ */ +static VkResult +loader_add_layer_names_to_list(const struct loader_instance *inst, + struct loader_layer_list *output_list, + uint32_t name_count, const char *const *names, + const struct loader_layer_list *search_list) { + struct loader_layer_properties *layer_prop; + VkResult err = VK_SUCCESS; + + for (uint32_t i = 0; i < name_count; i++) { + const char *search_target = names[i]; + layer_prop = loader_get_layer_property(search_target, search_list); + if (!layer_prop) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Unable to find layer %s", search_target); + err = VK_ERROR_LAYER_NOT_PRESENT; + continue; + } + + loader_add_to_layer_list(inst, output_list, 1, layer_prop); + } + + return err; +} + +/* + * Manage lists of VkLayerProperties + */ +static bool loader_init_layer_list(const struct loader_instance *inst, + struct loader_layer_list *list) { + list->capacity = 32 * sizeof(struct loader_layer_properties); + list->list = loader_heap_alloc(inst, list->capacity, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (list->list == NULL) { + return false; + } + memset(list->list, 0, list->capacity); + list->count = 0; + return true; +} + +void loader_destroy_layer_list(const struct loader_instance *inst, + struct loader_layer_list *layer_list) { + loader_heap_free(inst, layer_list->list); + layer_list->count = 0; + layer_list->capacity = 0; +} + +/* + * Manage list of layer libraries (loader_lib_info) + */ +static bool +loader_init_layer_library_list(const struct loader_instance *inst, + struct loader_layer_library_list *list) { + list->capacity = 32 * sizeof(struct loader_lib_info); + list->list = loader_heap_alloc(inst, list->capacity, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (list->list == NULL) { + return false; + } + memset(list->list, 0, list->capacity); + list->count = 0; + return true; +} + +void loader_destroy_layer_library_list(const struct loader_instance *inst, + struct loader_layer_library_list *list) { + for (uint32_t i = 0; i < 
list->count; i++) { + loader_heap_free(inst, list->list[i].lib_name); + } + loader_heap_free(inst, list->list); + list->count = 0; + list->capacity = 0; +} + +void loader_add_to_layer_library_list(const struct loader_instance *inst, + struct loader_layer_library_list *list, + uint32_t item_count, + const struct loader_lib_info *new_items) { + uint32_t i; + struct loader_lib_info *item; + + if (list->list == NULL || list->capacity == 0) { + loader_init_layer_library_list(inst, list); + } + + if (list->list == NULL) + return; + + for (i = 0; i < item_count; i++) { + item = (struct loader_lib_info *)&new_items[i]; + + // look for duplicates + for (uint32_t j = 0; j < list->count; j++) { + if (strcmp(list->list[i].lib_name, new_items->lib_name) == 0) { + continue; + } + } + + // add to list at end + // check for enough capacity + if (list->count * sizeof(struct loader_lib_info) >= list->capacity) { + + list->list = loader_heap_realloc( + inst, list->list, list->capacity, list->capacity * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + // double capacity + list->capacity *= 2; + } + + memcpy(&list->list[list->count], item, sizeof(struct loader_lib_info)); + list->count++; + } +} + +/* + * Search the given layer list for a list + * matching the given VkLayerProperties + */ +bool has_vk_layer_property(const VkLayerProperties *vk_layer_prop, + const struct loader_layer_list *list) { + for (uint32_t i = 0; i < list->count; i++) { + if (strcmp(vk_layer_prop->layerName, list->list[i].info.layerName) == 0) + return true; + } + return false; +} + +/* + * Search the given layer list for a layer + * matching the given name + */ +bool has_layer_name(const char *name, const struct loader_layer_list *list) { + for (uint32_t i = 0; i < list->count; i++) { + if (strcmp(name, list->list[i].info.layerName) == 0) + return true; + } + return false; +} + +/* + * Append non-duplicate layer properties defined in prop_list + * to the given layer_info list + */ +void 
loader_add_to_layer_list(const struct loader_instance *inst, + struct loader_layer_list *list, + uint32_t prop_list_count, + const struct loader_layer_properties *props) { + uint32_t i; + struct loader_layer_properties *layer; + + if (list->list == NULL || list->capacity == 0) { + loader_init_layer_list(inst, list); + } + + if (list->list == NULL) + return; + + for (i = 0; i < prop_list_count; i++) { + layer = (struct loader_layer_properties *)&props[i]; + + // look for duplicates + if (has_vk_layer_property(&layer->info, list)) { + continue; + } + + // add to list at end + // check for enough capacity + if (list->count * sizeof(struct loader_layer_properties) >= + list->capacity) { + + list->list = loader_heap_realloc( + inst, list->list, list->capacity, list->capacity * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + // double capacity + list->capacity *= 2; + } + + memcpy(&list->list[list->count], layer, + sizeof(struct loader_layer_properties)); + list->count++; + } +} + +/** + * Search the search_list for any layer with a name + * that matches the given name and a type that matches the given type + * Add all matching layers to the found_list + * Do not add if found loader_layer_properties is already + * on the found_list. 
+ */ +static void +loader_find_layer_name_add_list(const struct loader_instance *inst, + const char *name, const enum layer_type type, + const struct loader_layer_list *search_list, + struct loader_layer_list *found_list) { + bool found = false; + for (uint32_t i = 0; i < search_list->count; i++) { + struct loader_layer_properties *layer_prop = &search_list->list[i]; + if (0 == strcmp(layer_prop->info.layerName, name) && + (layer_prop->type & type)) { + /* Found a layer with the same name, add to found_list */ + loader_add_to_layer_list(inst, found_list, 1, layer_prop); + found = true; + } + } + if (!found) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Warning, couldn't find layer name %s to activate", name); + } +} + +static VkExtensionProperties * +get_extension_property(const char *name, + const struct loader_extension_list *list) { + for (uint32_t i = 0; i < list->count; i++) { + if (strcmp(name, list->list[i].extensionName) == 0) + return &list->list[i]; + } + return NULL; +} + +static VkExtensionProperties * +get_dev_extension_property(const char *name, + const struct loader_device_extension_list *list) { + for (uint32_t i = 0; i < list->count; i++) { + if (strcmp(name, list->list[i].props.extensionName) == 0) + return &list->list[i].props; + } + return NULL; +} + +/* + * This function will return the pNext pointer of any + * CreateInfo extensions that are not loader extensions. + * This is used to skip past the loader extensions prepended + * to the list during CreateInstance and CreateDevice. 
+ */ +void *loader_strip_create_extensions(const void *pNext) { + VkLayerInstanceCreateInfo *create_info = (VkLayerInstanceCreateInfo *)pNext; + + while ( + create_info && + (create_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO || + create_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO)) { + create_info = (VkLayerInstanceCreateInfo *)create_info->pNext; + } + + return create_info; +} + +/* + * For Instance extensions implemented within the loader (i.e. DEBUG_REPORT + * the extension must provide two entry points for the loader to use: + * - "trampoline" entry point - this is the address returned by GetProcAddr + * and will always do what's necessary to support a global call. + * - "terminator" function - this function will be put at the end of the + * instance chain and will contain the necessary logic to call / process + * the extension for the appropriate ICDs that are available. + * There is no generic mechanism for including these functions, the references + * must be placed into the appropriate loader entry points. + * GetInstanceProcAddr: call extension GetInstanceProcAddr to check for + * GetProcAddr requests + * loader_coalesce_extensions(void) - add extension records to the list of + * global + * extension available to the app. + * instance_disp - add function pointer for terminator function to this array. + * The extension itself should be in a separate file that will be + * linked directly with the loader. 
+ */ + +void loader_get_icd_loader_instance_extensions( + const struct loader_instance *inst, struct loader_icd_libs *icd_libs, + struct loader_extension_list *inst_exts) { + struct loader_extension_list icd_exts; + loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, + "Build ICD instance extension list"); + // traverse scanned icd list adding non-duplicate extensions to the list + for (uint32_t i = 0; i < icd_libs->count; i++) { + loader_init_generic_list(inst, (struct loader_generic_list *)&icd_exts, + sizeof(VkExtensionProperties)); + loader_add_instance_extensions( + inst, icd_libs->list[i].EnumerateInstanceExtensionProperties, + icd_libs->list[i].lib_name, &icd_exts); + loader_add_to_ext_list(inst, inst_exts, icd_exts.count, icd_exts.list); + loader_destroy_generic_list(inst, + (struct loader_generic_list *)&icd_exts); + }; + + // Traverse loader's extensions, adding non-duplicate extensions to the list + wsi_add_instance_extensions(inst, inst_exts); + debug_report_add_instance_extensions(inst, inst_exts); +} + +struct loader_physical_device * +loader_get_physical_device(const VkPhysicalDevice physdev) { + uint32_t i; + for (struct loader_instance *inst = loader.instances; inst; + inst = inst->next) { + for (i = 0; i < inst->total_gpu_count; i++) { + // TODO this aliases physDevices within instances, need for this + // function to go away + if (inst->phys_devs[i].disp == + loader_get_instance_dispatch(physdev)) { + return &inst->phys_devs[i]; + } + } + } + return NULL; +} + +struct loader_icd *loader_get_icd_and_device(const VkDevice device, + struct loader_device **found_dev) { + *found_dev = NULL; + for (struct loader_instance *inst = loader.instances; inst; + inst = inst->next) { + for (struct loader_icd *icd = inst->icds; icd; icd = icd->next) { + for (struct loader_device *dev = icd->logical_device_list; dev; + dev = dev->next) + /* Value comparison of device prevents object wrapping by layers + */ + if (loader_get_dispatch(dev->device) == + 
loader_get_dispatch(device)) { + *found_dev = dev; + return icd; + } + } + } + return NULL; +} + +static void loader_destroy_logical_device(const struct loader_instance *inst, + struct loader_device *dev) { + loader_heap_free(inst, dev->app_extension_props); + loader_destroy_layer_list(inst, &dev->activated_layer_list); + loader_heap_free(inst, dev); +} + +static struct loader_device * +loader_add_logical_device(const struct loader_instance *inst, + struct loader_device **device_list) { + struct loader_device *new_dev; + + new_dev = loader_heap_alloc(inst, sizeof(struct loader_device), + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE); + if (!new_dev) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to alloc struct loader-device"); + return NULL; + } + + memset(new_dev, 0, sizeof(struct loader_device)); + + new_dev->next = *device_list; + *device_list = new_dev; + return new_dev; +} + +void loader_remove_logical_device(const struct loader_instance *inst, + struct loader_icd *icd, + struct loader_device *found_dev) { + struct loader_device *dev, *prev_dev; + + if (!icd || !found_dev) + return; + + prev_dev = NULL; + dev = icd->logical_device_list; + while (dev && dev != found_dev) { + prev_dev = dev; + dev = dev->next; + } + + if (prev_dev) + prev_dev->next = found_dev->next; + else + icd->logical_device_list = found_dev->next; + loader_destroy_logical_device(inst, found_dev); +} + +static void loader_icd_destroy(struct loader_instance *ptr_inst, + struct loader_icd *icd) { + ptr_inst->total_icd_count--; + for (struct loader_device *dev = icd->logical_device_list; dev;) { + struct loader_device *next_dev = dev->next; + loader_destroy_logical_device(ptr_inst, dev); + dev = next_dev; + } + + loader_heap_free(ptr_inst, icd); +} + +static struct loader_icd * +loader_icd_create(const struct loader_instance *inst) { + struct loader_icd *icd; + + icd = loader_heap_alloc(inst, sizeof(*icd), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (!icd) + return NULL; + + 
memset(icd, 0, sizeof(*icd)); + + return icd; +} + +static struct loader_icd * +loader_icd_add(struct loader_instance *ptr_inst, + const struct loader_scanned_icds *icd_lib) { + struct loader_icd *icd; + + icd = loader_icd_create(ptr_inst); + if (!icd) + return NULL; + + icd->this_icd_lib = icd_lib; + icd->this_instance = ptr_inst; + + /* prepend to the list */ + icd->next = ptr_inst->icds; + ptr_inst->icds = icd; + ptr_inst->total_icd_count++; + + return icd; +} + +void loader_scanned_icd_clear(const struct loader_instance *inst, + struct loader_icd_libs *icd_libs) { + if (icd_libs->capacity == 0) + return; + for (uint32_t i = 0; i < icd_libs->count; i++) { + loader_platform_close_library(icd_libs->list[i].handle); + loader_heap_free(inst, icd_libs->list[i].lib_name); + } + loader_heap_free(inst, icd_libs->list); + icd_libs->capacity = 0; + icd_libs->count = 0; + icd_libs->list = NULL; +} + +static void loader_scanned_icd_init(const struct loader_instance *inst, + struct loader_icd_libs *icd_libs) { + loader_scanned_icd_clear(inst, icd_libs); + icd_libs->capacity = 8 * sizeof(struct loader_scanned_icds); + icd_libs->list = loader_heap_alloc(inst, icd_libs->capacity, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); +} + +static void loader_scanned_icd_add(const struct loader_instance *inst, + struct loader_icd_libs *icd_libs, + const char *filename, uint32_t api_version) { + loader_platform_dl_handle handle; + PFN_vkCreateInstance fp_create_inst; + PFN_vkEnumerateInstanceExtensionProperties fp_get_inst_ext_props; + PFN_vkGetInstanceProcAddr fp_get_proc_addr; + struct loader_scanned_icds *new_node; + + /* TODO implement ref counting of libraries, for now this function leaves + libraries open and the scanned_icd_clear closes them */ + // Used to call: dlopen(filename, RTLD_LAZY); + handle = loader_platform_open_library(filename); + if (!handle) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + loader_platform_open_library_error(filename)); + return; + } + + 
fp_get_proc_addr = + loader_platform_get_proc_address(handle, "vk_icdGetInstanceProcAddr"); + if (!fp_get_proc_addr) { + // Use deprecated interface + fp_get_proc_addr = + loader_platform_get_proc_address(handle, "vkGetInstanceProcAddr"); + if (!fp_get_proc_addr) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + loader_platform_get_proc_address_error( + "vk_icdGetInstanceProcAddr")); + return; + } else { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Using deprecated ICD interface of " + "vkGetInstanceProcAddr instead of " + "vk_icdGetInstanceProcAddr"); + } + fp_create_inst = + loader_platform_get_proc_address(handle, "vkCreateInstance"); + if (!fp_create_inst) { + loader_log( + inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Couldn't get vkCreateInstance via dlsym/loadlibrary from ICD"); + return; + } + fp_get_inst_ext_props = loader_platform_get_proc_address( + handle, "vkEnumerateInstanceExtensionProperties"); + if (!fp_get_inst_ext_props) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Couldn't get vkEnumerateInstanceExtensionProperties " + "via dlsym/loadlibrary from ICD"); + return; + } + } else { + // Use newer interface + fp_create_inst = + (PFN_vkCreateInstance)fp_get_proc_addr(NULL, "vkCreateInstance"); + if (!fp_create_inst) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Couldn't get vkCreateInstance via " + "vk_icdGetInstanceProcAddr from ICD"); + return; + } + fp_get_inst_ext_props = + (PFN_vkEnumerateInstanceExtensionProperties)fp_get_proc_addr( + NULL, "vkEnumerateInstanceExtensionProperties"); + if (!fp_get_inst_ext_props) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Couldn't get vkEnumerateInstanceExtensionProperties " + "via vk_icdGetInstanceProcAddr from ICD"); + return; + } + } + + // check for enough capacity + if ((icd_libs->count * sizeof(struct loader_scanned_icds)) >= + icd_libs->capacity) { + + icd_libs->list = loader_heap_realloc( + inst, icd_libs->list, icd_libs->capacity, 
icd_libs->capacity * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + // double capacity + icd_libs->capacity *= 2; + } + new_node = &(icd_libs->list[icd_libs->count]); + + new_node->handle = handle; + new_node->api_version = api_version; + new_node->GetInstanceProcAddr = fp_get_proc_addr; + new_node->EnumerateInstanceExtensionProperties = fp_get_inst_ext_props; + new_node->CreateInstance = fp_create_inst; + + new_node->lib_name = (char *)loader_heap_alloc( + inst, strlen(filename) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (!new_node->lib_name) { + loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, + "Out of memory can't add icd"); + return; + } + strcpy(new_node->lib_name, filename); + icd_libs->count++; +} + +static bool loader_icd_init_entrys(struct loader_icd *icd, VkInstance inst, + const PFN_vkGetInstanceProcAddr fp_gipa) { +/* initialize entrypoint function pointers */ + +#define LOOKUP_GIPA(func, required) \ + do { \ + icd->func = (PFN_vk##func)fp_gipa(inst, "vk" #func); \ + if (!icd->func && required) { \ + loader_log((struct loader_instance *)inst, \ + VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, \ + loader_platform_get_proc_address_error("vk" #func)); \ + return false; \ + } \ + } while (0) + + LOOKUP_GIPA(GetDeviceProcAddr, true); + LOOKUP_GIPA(DestroyInstance, true); + LOOKUP_GIPA(EnumeratePhysicalDevices, true); + LOOKUP_GIPA(GetPhysicalDeviceFeatures, true); + LOOKUP_GIPA(GetPhysicalDeviceFormatProperties, true); + LOOKUP_GIPA(GetPhysicalDeviceImageFormatProperties, true); + LOOKUP_GIPA(CreateDevice, true); + LOOKUP_GIPA(GetPhysicalDeviceProperties, true); + LOOKUP_GIPA(GetPhysicalDeviceMemoryProperties, true); + LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyProperties, true); + LOOKUP_GIPA(EnumerateDeviceExtensionProperties, true); + LOOKUP_GIPA(GetPhysicalDeviceSparseImageFormatProperties, true); + LOOKUP_GIPA(CreateDebugReportCallbackEXT, false); + LOOKUP_GIPA(DestroyDebugReportCallbackEXT, false); + LOOKUP_GIPA(GetPhysicalDeviceSurfaceSupportKHR, false); + 
LOOKUP_GIPA(GetPhysicalDeviceSurfaceCapabilitiesKHR, false); + LOOKUP_GIPA(GetPhysicalDeviceSurfaceFormatsKHR, false); + LOOKUP_GIPA(GetPhysicalDeviceSurfacePresentModesKHR, false); +#ifdef VK_USE_PLATFORM_WIN32_KHR + LOOKUP_GIPA(GetPhysicalDeviceWin32PresentationSupportKHR, false); +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + LOOKUP_GIPA(GetPhysicalDeviceXcbPresentationSupportKHR, false); +#endif + +#undef LOOKUP_GIPA + + return true; +} + +static void loader_debug_init(void) { + const char *env, *orig; + + if (g_loader_debug > 0) + return; + + g_loader_debug = 0; + + /* parse comma-separated debug options */ + orig = env = loader_getenv("VK_LOADER_DEBUG"); + while (env) { + const char *p = strchr(env, ','); + size_t len; + + if (p) + len = p - env; + else + len = strlen(env); + + if (len > 0) { + if (strncmp(env, "all", len) == 0) { + g_loader_debug = ~0u; + g_loader_log_msgs = ~0u; + } else if (strncmp(env, "warn", len) == 0) { + g_loader_debug |= LOADER_WARN_BIT; + g_loader_log_msgs |= VK_DEBUG_REPORT_WARNING_BIT_EXT; + } else if (strncmp(env, "info", len) == 0) { + g_loader_debug |= LOADER_INFO_BIT; + g_loader_log_msgs |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT; + } else if (strncmp(env, "perf", len) == 0) { + g_loader_debug |= LOADER_PERF_BIT; + g_loader_log_msgs |= VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT; + } else if (strncmp(env, "error", len) == 0) { + g_loader_debug |= LOADER_ERROR_BIT; + g_loader_log_msgs |= VK_DEBUG_REPORT_ERROR_BIT_EXT; + } else if (strncmp(env, "debug", len) == 0) { + g_loader_debug |= LOADER_DEBUG_BIT; + g_loader_log_msgs |= VK_DEBUG_REPORT_DEBUG_BIT_EXT; + } + } + + if (!p) + break; + + env = p + 1; + } + + loader_free_getenv(orig); +} + +void loader_initialize(void) { + // initialize mutexs + loader_platform_thread_create_mutex(&loader_lock); + loader_platform_thread_create_mutex(&loader_json_lock); + + // initialize logging + loader_debug_init(); + + // initial cJSON to use alloc callbacks + cJSON_Hooks alloc_fns = { + .malloc_fn = 
loader_tls_heap_alloc, .free_fn = loader_tls_heap_free, + }; + cJSON_InitHooks(&alloc_fns); +} + +struct loader_manifest_files { + uint32_t count; + char **filename_list; +}; + +/** + * Get next file or dirname given a string list or registry key path + * + * \returns + * A pointer to first char in the next path. + * The next path (or NULL) in the list is returned in next_path. + * Note: input string is modified in some cases. PASS IN A COPY! + */ +static char *loader_get_next_path(char *path) { + uint32_t len; + char *next; + + if (path == NULL) + return NULL; + next = strchr(path, PATH_SEPERATOR); + if (next == NULL) { + len = (uint32_t)strlen(path); + next = path + len; + } else { + *next = '\0'; + next++; + } + + return next; +} + +/** + * Given a path which is absolute or relative, expand the path if relative or + * leave the path unmodified if absolute. The base path to prepend to relative + * paths is given in rel_base. + * + * \returns + * A string in out_fullpath of the full absolute path + */ +static void loader_expand_path(const char *path, const char *rel_base, + size_t out_size, char *out_fullpath) { + if (loader_platform_is_path_absolute(path)) { + // do not prepend a base to an absolute path + rel_base = ""; + } + + loader_platform_combine_path(out_fullpath, out_size, rel_base, path, NULL); +} + +/** + * Given a filename (file) and a list of paths (dir), try to find an existing + * file in the paths. If filename already is a path then no + * searching in the given paths. + * + * \returns + * A string in out_fullpath of either the full path or file. 
+ */ +static void loader_get_fullpath(const char *file, const char *dirs, + size_t out_size, char *out_fullpath) { + if (!loader_platform_is_path(file) && *dirs) { + char *dirs_copy, *dir, *next_dir; + + dirs_copy = loader_stack_alloc(strlen(dirs) + 1); + strcpy(dirs_copy, dirs); + + // find if file exists after prepending paths in given list + for (dir = dirs_copy; *dir && (next_dir = loader_get_next_path(dir)); + dir = next_dir) { + loader_platform_combine_path(out_fullpath, out_size, dir, file, + NULL); + if (loader_platform_file_exists(out_fullpath)) { + return; + } + } + } + + snprintf(out_fullpath, out_size, "%s", file); +} + +/** + * Read a JSON file into a buffer. + * + * \returns + * A pointer to a cJSON object representing the JSON parse tree. + * This returned buffer should be freed by caller. + */ +static cJSON *loader_get_json(const struct loader_instance *inst, + const char *filename) { + FILE *file; + char *json_buf; + cJSON *json; + size_t len; + file = fopen(filename, "rb"); + if (!file) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Couldn't open JSON file %s", filename); + return NULL; + } + fseek(file, 0, SEEK_END); + len = ftell(file); + fseek(file, 0, SEEK_SET); + json_buf = (char *)loader_stack_alloc(len + 1); + if (json_buf == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Out of memory can't get JSON file"); + fclose(file); + return NULL; + } + if (fread(json_buf, sizeof(char), len, file) != len) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "fread failed can't get JSON file"); + fclose(file); + return NULL; + } + fclose(file); + json_buf[len] = '\0'; + + // parse text from file + json = cJSON_Parse(json_buf); + if (json == NULL) + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Can't parse JSON file %s", filename); + return json; +} + +/** + * Do a deep copy of the loader_layer_properties structure. 
+ */ +static void loader_copy_layer_properties(const struct loader_instance *inst, + struct loader_layer_properties *dst, + struct loader_layer_properties *src) { + uint32_t cnt, i; + memcpy(dst, src, sizeof(*src)); + dst->instance_extension_list.list = + loader_heap_alloc(inst, sizeof(VkExtensionProperties) * + src->instance_extension_list.count, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + dst->instance_extension_list.capacity = + sizeof(VkExtensionProperties) * src->instance_extension_list.count; + memcpy(dst->instance_extension_list.list, src->instance_extension_list.list, + dst->instance_extension_list.capacity); + dst->device_extension_list.list = + loader_heap_alloc(inst, sizeof(struct loader_dev_ext_props) * + src->device_extension_list.count, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + + dst->device_extension_list.capacity = + sizeof(struct loader_dev_ext_props) * src->device_extension_list.count; + memcpy(dst->device_extension_list.list, src->device_extension_list.list, + dst->device_extension_list.capacity); + if (src->device_extension_list.count > 0 && + src->device_extension_list.list->entrypoint_count > 0) { + cnt = src->device_extension_list.list->entrypoint_count; + dst->device_extension_list.list->entrypoints = loader_heap_alloc( + inst, sizeof(char *) * cnt, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + for (i = 0; i < cnt; i++) { + dst->device_extension_list.list->entrypoints[i] = loader_heap_alloc( + inst, + strlen(src->device_extension_list.list->entrypoints[i]) + 1, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + strcpy(dst->device_extension_list.list->entrypoints[i], + src->device_extension_list.list->entrypoints[i]); + } + } +} + +static bool +loader_find_layer_name_list(const char *name, + const struct loader_layer_list *layer_list) { + if (!layer_list) + return false; + for (uint32_t j = 0; j < layer_list->count; j++) + if (!strcmp(name, layer_list->list[j].info.layerName)) + return true; + return false; +} + +static bool loader_find_layer_name(const char 
*name, uint32_t layer_count, + const char **layer_list) { + if (!layer_list) + return false; + for (uint32_t j = 0; j < layer_count; j++) + if (!strcmp(name, layer_list[j])) + return true; + return false; +} + +static bool loader_find_layer_name_array( + const char *name, uint32_t layer_count, + const char layer_list[][VK_MAX_EXTENSION_NAME_SIZE]) { + if (!layer_list) + return false; + for (uint32_t j = 0; j < layer_count; j++) + if (!strcmp(name, layer_list[j])) + return true; + return false; +} + +/** + * Searches through an array of layer names (ppp_layer_names) looking for a + * layer key_name. + * If not found then simply returns updating nothing. + * Otherwise, it uses expand_count, expand_names adding them to layer names. + * Any duplicate (pre-existing) exapand_names in layer names are removed. + * Expand names are added to the back/end of the list of layer names. + * @param inst + * @param layer_count + * @param ppp_layer_names + */ +void loader_expand_layer_names( + const struct loader_instance *inst, const char *key_name, + uint32_t expand_count, + const char expand_names[][VK_MAX_EXTENSION_NAME_SIZE], + uint32_t *layer_count, char ***ppp_layer_names) { + char **pp_layer_names, **pp_src_layers = *ppp_layer_names; + + if (!loader_find_layer_name(key_name, *layer_count, + (const char **)pp_src_layers)) + return; // didn't find the key_name in the list + + // since the total number of layers may expand, allocate new memory for the + // array of pointers + pp_layer_names = + loader_heap_alloc(inst, (expand_count + *layer_count) * sizeof(char *), + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND); + + loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "Found meta layer %s, replacing with actual layer group", + key_name); + // In place removal of any expand_names found in layer_name (remove + // duplicates) + // Also remove the key_name + uint32_t src_idx, dst_idx, cnt = *layer_count; + for (src_idx = 0; src_idx < *layer_count; src_idx++) { + if 
(loader_find_layer_name_array(pp_src_layers[src_idx], expand_count, + expand_names)) { + pp_src_layers[src_idx] = NULL; + cnt--; + } else if (!strcmp(pp_src_layers[src_idx], key_name)) { + pp_src_layers[src_idx] = NULL; + cnt--; + } + pp_layer_names[src_idx] = pp_src_layers[src_idx]; + } + for (dst_idx = 0; dst_idx < cnt; dst_idx++) { + if (pp_layer_names[dst_idx] == NULL) { + src_idx = dst_idx + 1; + while (src_idx < *layer_count && pp_src_layers[src_idx] == NULL) + src_idx++; + if (src_idx < *layer_count && pp_src_layers[src_idx] != NULL) + pp_layer_names[dst_idx] = pp_src_layers[src_idx]; + } + } + + // Add the expand_names to layer_names + src_idx = 0; + for (dst_idx = cnt; dst_idx < cnt + expand_count; dst_idx++) { + pp_layer_names[dst_idx] = (char *)&expand_names[src_idx++][0]; + } + *layer_count = expand_count + cnt; + *ppp_layer_names = pp_layer_names; + return; +} + +/** + * Restores the layer name list and count into the pCreatInfo structure. + * If is_device == tru then pCreateInfo is a device structure else an instance + * structure. 
+ * @param layer_count + * @param layer_names + * @param pCreateInfo + */ +void loader_unexpand_dev_layer_names(const struct loader_instance *inst, + uint32_t layer_count, char **layer_names, + char **layer_ptr, + const VkDeviceCreateInfo *pCreateInfo) { + uint32_t *p_cnt = (uint32_t *)&pCreateInfo->enabledLayerCount; + *p_cnt = layer_count; + + char ***p_ptr = (char ***)&pCreateInfo->ppEnabledLayerNames; + if ((char **)pCreateInfo->ppEnabledLayerNames != layer_ptr) + loader_heap_free(inst, (void *)pCreateInfo->ppEnabledLayerNames); + *p_ptr = layer_ptr; + for (uint32_t i = 0; i < layer_count; i++) { + char **pp_str = (char **)&pCreateInfo->ppEnabledLayerNames[i]; + *pp_str = layer_names[i]; + } +} + +void loader_unexpand_inst_layer_names(const struct loader_instance *inst, + uint32_t layer_count, char **layer_names, + char **layer_ptr, + const VkInstanceCreateInfo *pCreateInfo) { + uint32_t *p_cnt = (uint32_t *)&pCreateInfo->enabledLayerCount; + *p_cnt = layer_count; + + char ***p_ptr = (char ***)&pCreateInfo->ppEnabledLayerNames; + if ((char **)pCreateInfo->ppEnabledLayerNames != layer_ptr) + loader_heap_free(inst, (void *)pCreateInfo->ppEnabledLayerNames); + *p_ptr = layer_ptr; + for (uint32_t i = 0; i < layer_count; i++) { + char **pp_str = (char **)&pCreateInfo->ppEnabledLayerNames[i]; + *pp_str = layer_names[i]; + } +} + +/** + * Searches through the existing instance and device layer lists looking for + * the set of required layer names. If found then it adds a meta property to the + * layer list. + * Assumes the required layers are the same for both instance and device lists. 
+ * @param inst + * @param layer_count number of layers in layer_names + * @param layer_names array of required layer names + * @param layer_instance_list + * @param layer_device_list + */ +static void loader_add_layer_property_meta( + const struct loader_instance *inst, uint32_t layer_count, + const char layer_names[][VK_MAX_EXTENSION_NAME_SIZE], + struct loader_layer_list *layer_instance_list, + struct loader_layer_list *layer_device_list) { + uint32_t i, j; + bool found; + struct loader_layer_list *layer_list; + + if (0 == layer_count || + NULL == layer_instance_list || + NULL == layer_device_list || + (layer_count > layer_instance_list->count && + layer_count > layer_device_list->count)) + return; + + for (j = 0; j < 2; j++) { + if (j == 0) + layer_list = layer_instance_list; + else + layer_list = layer_device_list; + found = true; + for (i = 0; i < layer_count; i++) { + if (loader_find_layer_name_list(layer_names[i], layer_list)) + continue; + found = false; + break; + } + + struct loader_layer_properties *props; + if (found) { + props = loader_get_next_layer_property(inst, layer_list); + props->type = VK_LAYER_TYPE_META_EXPLICT; + strncpy(props->info.description, "LunarG Standard Validation Layer", + sizeof(props->info.description)); + props->info.implementationVersion = 1; + strncpy(props->info.layerName, std_validation_str, + sizeof(props->info.layerName)); + // TODO what about specVersion? for now insert loader's built + // version + props->info.specVersion = VK_API_VERSION; + } + } +} + +/** + * Given a cJSON struct (json) of the top level JSON object from layer manifest + * file, add entry to the layer_list. + * Fill out the layer_properties in this list entry from the input cJSON object. + * + * \returns + * void + * layer_list has a new entry and initialized accordingly. + * If the json input object does not have all the required fields no entry + * is added to the list. 
 */
static void
loader_add_layer_properties(const struct loader_instance *inst,
                            struct loader_layer_list *layer_instance_list,
                            struct loader_layer_list *layer_device_list,
                            cJSON *json, bool is_implicit, char *filename) {
    /* Fields in layer manifest file that are required:
     * (required) "file_format_version"
     * following are required in the "layer" object:
     * (required) "name"
     * (required) "type"
     * (required) "library_path"
     * (required) "api_version"
     * (required) "implementation_version"
     * (required) "description"
     * (required for implicit layers) "disable_environment"
     *
     * First get all required items and if any missing abort
     */

    cJSON *item, *layer_node, *ext_item;
    char *temp;
    char *name, *type, *library_path, *api_version;
    char *implementation_version, *description;
    cJSON *disable_environment;
    int i, j;
    VkExtensionProperties ext_prop;
    item = cJSON_GetObjectItem(json, "file_format_version");
    if (item == NULL) {
        // manifest without a file_format_version is silently ignored
        return;
    }
    char *file_vers = cJSON_PrintUnformatted(item);
    loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
               "Found manifest file %s, version %s", filename, file_vers);
    // cJSON_Print* returns the value with surrounding quotes, hence \"1.0.0\"
    if (strcmp(file_vers, "\"1.0.0\"") != 0)
        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                   "Unexpected manifest file version (expected 1.0.0), may "
                   "cause errors");
    loader_tls_heap_free(file_vers);

    layer_node = cJSON_GetObjectItem(json, "layer");
    if (layer_node == NULL) {
        loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                   "Can't find \"layer\" object in manifest JSON file, "
                   "skipping this file");
        return;
    }

    // loop through all "layer" objects in the file
    do {
/* NOTE: both macros below embed a `continue` that targets the enclosing
 * do/while over layer_node — they skip the current layer on a missing field. */
#define GET_JSON_OBJECT(node, var)                                             \
    {                                                                          \
        var = cJSON_GetObjectItem(node, #var);                                 \
        if (var == NULL) {                                                     \
            layer_node = layer_node->next;                                     \
            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,               \
                       "Didn't find required layer object %s in manifest "     \
                       "JSON file, skipping this layer",                       \
                       #var);                                                  \
            continue;                                                          \
        }                                                                      \
    }
/* Fetches a string field; strips the surrounding quotes that cJSON_Print adds
 * by dropping the first and last characters, and copies the result into a
 * stack allocation named after the field. */
#define GET_JSON_ITEM(node, var)                                               \
    {                                                                          \
        item = cJSON_GetObjectItem(node, #var);                                \
        if (item == NULL) {                                                    \
            layer_node = layer_node->next;                                     \
            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,               \
                       "Didn't find required layer value %s in manifest JSON " \
                       "file, skipping this layer",                            \
                       #var);                                                  \
            continue;                                                          \
        }                                                                      \
        temp = cJSON_Print(item);                                              \
        temp[strlen(temp) - 1] = '\0';                                         \
        var = loader_stack_alloc(strlen(temp) + 1);                            \
        strcpy(var, &temp[1]);                                                 \
        loader_tls_heap_free(temp);                                            \
    }
        GET_JSON_ITEM(layer_node, name)
        GET_JSON_ITEM(layer_node, type)
        GET_JSON_ITEM(layer_node, library_path)
        GET_JSON_ITEM(layer_node, api_version)
        GET_JSON_ITEM(layer_node, implementation_version)
        GET_JSON_ITEM(layer_node, description)
        if (is_implicit) {
            GET_JSON_OBJECT(layer_node, disable_environment)
        }
#undef GET_JSON_ITEM
#undef GET_JSON_OBJECT

        // add list entry
        struct loader_layer_properties *props = NULL;
        if (!strcmp(type, "DEVICE")) {
            if (layer_device_list == NULL) {
                layer_node = layer_node->next;
                continue;
            }
            props = loader_get_next_layer_property(inst, layer_device_list);
            props->type = (is_implicit) ? VK_LAYER_TYPE_DEVICE_IMPLICIT
                                        : VK_LAYER_TYPE_DEVICE_EXPLICIT;
        }
        if (!strcmp(type, "INSTANCE")) {
            if (layer_instance_list == NULL) {
                layer_node = layer_node->next;
                continue;
            }
            props = loader_get_next_layer_property(inst, layer_instance_list);
            props->type = (is_implicit) ? VK_LAYER_TYPE_INSTANCE_IMPLICIT
                                        : VK_LAYER_TYPE_INSTANCE_EXPLICIT;
        }
        // GLOBAL layers prefer the instance list here; they are copied into
        // the device list at the bottom of the loop body.
        if (!strcmp(type, "GLOBAL")) {
            if (layer_instance_list != NULL)
                props =
                    loader_get_next_layer_property(inst, layer_instance_list);
            else if (layer_device_list != NULL)
                props = loader_get_next_layer_property(inst, layer_device_list);
            else {
                layer_node = layer_node->next;
                continue;
            }
            props->type = (is_implicit) ? VK_LAYER_TYPE_GLOBAL_IMPLICIT
                                        : VK_LAYER_TYPE_GLOBAL_EXPLICIT;
        }

        if (props == NULL) {
            // unknown "type" string — skip this layer
            layer_node = layer_node->next;
            continue;
        }

        strncpy(props->info.layerName, name, sizeof(props->info.layerName));
        props->info.layerName[sizeof(props->info.layerName) - 1] = '\0';

        char *fullpath = props->lib_name;
        char *rel_base;
        if (loader_platform_is_path(library_path)) {
            // a relative or absolute path
            char *name_copy = loader_stack_alloc(strlen(filename) + 1);
            strcpy(name_copy, filename);
            rel_base = loader_platform_dirname(name_copy);
            loader_expand_path(library_path, rel_base, MAX_STRING_SIZE,
                               fullpath);
        } else {
            // a filename which is assumed in a system directory
            loader_get_fullpath(library_path, DEFAULT_VK_LAYERS_PATH,
                                MAX_STRING_SIZE, fullpath);
        }
        props->info.specVersion = loader_make_version(api_version);
        props->info.implementationVersion = atoi(implementation_version);
        strncpy((char *)props->info.description, description,
                sizeof(props->info.description));
        props->info.description[sizeof(props->info.description) - 1] = '\0';
        if (is_implicit) {
            // disable_environment is a single-entry object:
            // { "VAR_NAME": "value" } -> child->string / child->valuestring
            strncpy(props->disable_env_var.name,
                    disable_environment->child->string,
                    sizeof(props->disable_env_var.name));
            props->disable_env_var
                .name[sizeof(props->disable_env_var.name) - 1] = '\0';
            strncpy(props->disable_env_var.value,
                    disable_environment->child->valuestring,
                    sizeof(props->disable_env_var.value));
            props->disable_env_var
                .value[sizeof(props->disable_env_var.value) - 1] = '\0';
        }

/**
 * Now get all optional items and objects and put in list:
 * functions
 * instance_extensions
 * device_extensions
 * enable_environment (implicit layers only)
 */
#define GET_JSON_OBJECT(node, var)                                             \
    { var = cJSON_GetObjectItem(node, #var); }
/* Optional-field variant: leaves `var` unchanged when the field is absent. */
#define GET_JSON_ITEM(node, var)                                               \
    {                                                                          \
        item = cJSON_GetObjectItem(node, #var);                                \
        if (item != NULL) {                                                    \
            temp = cJSON_Print(item);                                          \
            temp[strlen(temp) - 1] = '\0';                                     \
            var = loader_stack_alloc(strlen(temp) + 1);                        \
            strcpy(var, &temp[1]);                                             \
            loader_tls_heap_free(temp);                                        \
        }                                                                      \
    }

        cJSON *instance_extensions, *device_extensions, *functions,
            *enable_environment;
        cJSON *entrypoints;
        char *vkGetInstanceProcAddr, *vkGetDeviceProcAddr, *spec_version;
        char **entry_array;
        vkGetInstanceProcAddr = NULL;
        vkGetDeviceProcAddr = NULL;
        spec_version = NULL;
        entrypoints = NULL;
        entry_array = NULL;
        /**
         * functions
         *     vkGetInstanceProcAddr
         *     vkGetDeviceProcAddr
         */
        GET_JSON_OBJECT(layer_node, functions)
        if (functions != NULL) {
            GET_JSON_ITEM(functions, vkGetInstanceProcAddr)
            GET_JSON_ITEM(functions, vkGetDeviceProcAddr)
            if (vkGetInstanceProcAddr != NULL)
                strncpy(props->functions.str_gipa, vkGetInstanceProcAddr,
                        sizeof(props->functions.str_gipa));
            props->functions.str_gipa[sizeof(props->functions.str_gipa) - 1] =
                '\0';
            if (vkGetDeviceProcAddr != NULL)
                strncpy(props->functions.str_gdpa, vkGetDeviceProcAddr,
                        sizeof(props->functions.str_gdpa));
            props->functions.str_gdpa[sizeof(props->functions.str_gdpa) - 1] =
                '\0';
        }
        /**
         * instance_extensions
         * array of
         *     name
         *     spec_version
         */
        GET_JSON_OBJECT(layer_node, instance_extensions)
        if (instance_extensions != NULL) {
            int count = cJSON_GetArraySize(instance_extensions);
            for (i = 0; i < count; i++) {
                ext_item = cJSON_GetArrayItem(instance_extensions, i);
                GET_JSON_ITEM(ext_item, name)
                GET_JSON_ITEM(ext_item, spec_version)
                if (name != NULL) {
                    strncpy(ext_prop.extensionName, name,
                            sizeof(ext_prop.extensionName));
                    ext_prop.extensionName[sizeof(ext_prop.extensionName) - 1] =
                        '\0';
                }
                /* NOTE(review): spec_version stays NULL if the first array
                 * element omits "spec_version" — atoi(NULL) is undefined
                 * behavior; verify manifests always carry the field. */
                ext_prop.specVersion = atoi(spec_version);
                loader_add_to_ext_list(inst, &props->instance_extension_list, 1,
                                       &ext_prop);
            }
        }
        /**
         * device_extensions
         * array of
         *     name
         *     spec_version
         *     entrypoints
         */
        GET_JSON_OBJECT(layer_node, device_extensions)
        if (device_extensions != NULL) {
            int count = cJSON_GetArraySize(device_extensions);
            for (i = 0; i < count; i++) {
                ext_item = cJSON_GetArrayItem(device_extensions, i);
                GET_JSON_ITEM(ext_item, name)
                GET_JSON_ITEM(ext_item, spec_version)
                if (name != NULL) {
                    strncpy(ext_prop.extensionName, name,
                            sizeof(ext_prop.extensionName));
                    ext_prop.extensionName[sizeof(ext_prop.extensionName) - 1] =
                        '\0';
                }
                ext_prop.specVersion = atoi(spec_version);
                // entrypoints = cJSON_GetObjectItem(ext_item, "entrypoints");
                GET_JSON_OBJECT(ext_item, entrypoints)
                int entry_count;
                if (entrypoints == NULL) {
                    loader_add_to_dev_ext_list(inst,
                                               &props->device_extension_list,
                                               &ext_prop, 0, NULL);
                    continue;
                }
                entry_count = cJSON_GetArraySize(entrypoints);
                if (entry_count)
                    entry_array = (char **)loader_stack_alloc(sizeof(char *) *
                                                              entry_count);
                // ext_item is reused below as the entrypoint array element
                for (j = 0; j < entry_count; j++) {
                    ext_item = cJSON_GetArrayItem(entrypoints, j);
                    if (ext_item != NULL) {
                        temp = cJSON_Print(ext_item);
                        temp[strlen(temp) - 1] = '\0';
                        entry_array[j] = loader_stack_alloc(strlen(temp) + 1);
                        strcpy(entry_array[j], &temp[1]);
                        loader_tls_heap_free(temp);
                    }
                }
                loader_add_to_dev_ext_list(inst, &props->device_extension_list,
                                           &ext_prop, entry_count, entry_array);
            }
        }
        if (is_implicit) {
            GET_JSON_OBJECT(layer_node, enable_environment)

            // enable_environment is optional
            if (enable_environment) {
                strncpy(props->enable_env_var.name,
                        enable_environment->child->string,
                        sizeof(props->enable_env_var.name));
                props->enable_env_var
                    .name[sizeof(props->enable_env_var.name) - 1] = '\0';
                strncpy(props->enable_env_var.value,
                        enable_environment->child->valuestring,
                        sizeof(props->enable_env_var.value));
                props->enable_env_var
                    .value[sizeof(props->enable_env_var.value) - 1] = '\0';
            }
        }
#undef GET_JSON_ITEM
#undef GET_JSON_OBJECT
        // for global layers need to add them to both device and instance list
        if (!strcmp(type, "GLOBAL")) {
            struct loader_layer_properties *dev_props;
            if (layer_instance_list == NULL || layer_device_list == NULL) {
                layer_node = layer_node->next;
                continue;
            }
            dev_props = loader_get_next_layer_property(inst, layer_device_list);
            // copy into device layer list
            loader_copy_layer_properties(inst, dev_props, props);
        }
        layer_node = layer_node->next;
    } while (layer_node != NULL);
    return;
}

/**
 * Find the Vulkan library manifest files.
 *
 * This function scans the location or env_override directories/files
 * for a list of JSON manifest files. If env_override is non-NULL
 * and has a valid value. Then the location is ignored. Otherwise
 * location is used to look for manifest files. The location
 * is interpreted as  Registry path on Windows and a directory path(s)
 * on Linux.
 *
 * \returns
 * A string list of manifest files to be opened in out_files param.
 * List has a pointer to string for each manifest filename.
 * When done using the list in out_files, pointers should be freed.
 * Location or override  string lists can be either files or directories as
 *follows:
 *            | location | override
 * --------------------------------
 * Win ICD    | files    | files
 * Win Layer  | files    | dirs
 * Linux ICD  | dirs     | files
 * Linux Layer| dirs     | dirs
 */
static void loader_get_manifest_files(const struct loader_instance *inst,
                                      const char *env_override, bool is_layer,
                                      const char *location,
                                      struct loader_manifest_files *out_files) {
    char *override = NULL;
    char *loc;
    char *file, *next_file, *name;
    size_t alloced_count = 64;
    char full_path[2048];
    DIR *sysdir = NULL;
    bool list_is_dirs = false;
    struct dirent *dent;

    out_files->count = 0;
    out_files->filename_list = NULL;

    if (env_override != NULL && (override = loader_getenv(env_override))) {
#if !defined(_WIN32)
        if (geteuid() != getuid()) {
            /* Don't allow setuid apps to use the env var: */
            loader_free_getenv(override);
            override = NULL;
        }
#endif
    }

    if (location == NULL) {
        loader_log(
            inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
            "Can't get manifest files with NULL location, env_override=%s",
            env_override);
        return;
    }

#if defined(_WIN32)
    list_is_dirs = (is_layer && override != NULL) ? true : false;
#else
    list_is_dirs = (override == NULL || is_layer) ? true : false;
#endif
    // Make a copy of the input we are using so it is not modified
    // Also handle getting the location(s) from registry on Windows
    if (override == NULL) {
        loc = loader_stack_alloc(strlen(location) + 1);
        if (loc == NULL) {
            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                       "Out of memory can't get manifest files");
            return;
        }
        strcpy(loc, location);
#if defined(_WIN32)
        loc = loader_get_registry_files(inst, loc);
        if (loc == NULL) {
            if (!is_layer) {
                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                           "Registry lookup failed can't get ICD manifest "
                           "files, do you have a Vulkan driver installed");
            } else {
                // warning only for layers
                loader_log(
                    inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                    "Registry lookup failed can't get layer manifest files");
            }
            return;
        }
#endif
    } else {
        loc = loader_stack_alloc(strlen(override) + 1);
        if (loc == NULL) {
            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                       "Out of memory can't get manifest files");
            return;
        }
        strcpy(loc, override);
        loader_free_getenv(override);
    }

    // Print out the paths being searched if debugging is enabled
    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
               "Searching the following paths for manifest files: %s\n", loc);

    // `loc` is a PATH_SEPARATOR-delimited list; loader_get_next_path
    // terminates the current element and returns the next one.
    file = loc;
    while (*file) {
        next_file = loader_get_next_path(file);
        if (list_is_dirs) {
            sysdir = opendir(file);
            name = NULL;
            if (sysdir) {
                dent = readdir(sysdir);
                /* NOTE(review): this break exits the outer while(*file) loop,
                 * so an empty directory stops the scan of all remaining search
                 * paths and skips the closedir(sysdir) below — verify intent. */
                if (dent == NULL)
                    break;
                name = &(dent->d_name[0]);
                loader_get_fullpath(name, file, sizeof(full_path), full_path);
                name = full_path;
            }
        } else {
#if defined(_WIN32)
            name = file;
#else
            // only Linux has relative paths
            char *dir;
            // make a copy of location so it isn't modified
            dir = loader_stack_alloc(strlen(loc) + 1);
            if (dir == NULL) {
                loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                           "Out of memory can't get manifest files");
                return;
            }
            strcpy(dir, loc);

            loader_get_fullpath(file, dir, sizeof(full_path), full_path);

            name = full_path;
#endif
        }
        while (name) {
            /* Look for files ending with ".json" suffix */
            uint32_t nlen = (uint32_t)strlen(name);
            const char *suf = name + nlen - 5;
            if ((nlen > 5) && !strncmp(suf, ".json", 5)) {
                if (out_files->count == 0) {
                    out_files->filename_list =
                        loader_heap_alloc(inst, alloced_count * sizeof(char *),
                                          VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
                } else if (out_files->count == alloced_count) {
                    // grow the list by doubling its capacity
                    out_files->filename_list =
                        loader_heap_realloc(inst, out_files->filename_list,
                                            alloced_count * sizeof(char *),
                                            alloced_count * sizeof(char *) * 2,
                                            VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
                    alloced_count *= 2;
                }
                if (out_files->filename_list == NULL) {
                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                               "Out of memory can't alloc manifest file list");
                    return;
                }
                out_files->filename_list[out_files->count] = loader_heap_alloc(
                    inst, strlen(name) + 1, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
                if (out_files->filename_list[out_files->count] == NULL) {
                    loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                               "Out of memory can't get manifest files");
                    return;
                }
                strcpy(out_files->filename_list[out_files->count], name);
                out_files->count++;
            } else if (!list_is_dirs) {
                loader_log(
                    inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                    "Skipping manifest file %s, file name must end in .json",
                    name);
            }
            if (list_is_dirs) {
                dent = readdir(sysdir);
                if (dent == NULL)
                    break;
                name = &(dent->d_name[0]);
                loader_get_fullpath(name, file, sizeof(full_path), full_path);
                name = full_path;
            } else {
                break;
            }
        }
        if (sysdir)
            closedir(sysdir);
        file = next_file;
    }
    return;
}

void loader_init_icd_lib_list() {}

void loader_destroy_icd_lib_list() {}
/**
 * Try to find the Vulkan ICD driver(s).
 *
 * This function scans the default system loader path(s) or path
 * specified by the \c VK_ICD_FILENAMES environment variable in
 * order to find loadable VK ICDs manifest files. From these
 * manifest files it finds the ICD libraries.
 *
 * \returns
 * a list of icds that were discovered
 */
void loader_icd_scan(const struct loader_instance *inst,
                     struct loader_icd_libs *icds) {
    char *file_str;
    struct loader_manifest_files manifest_files;

    loader_scanned_icd_init(inst, icds);
    // Get a list of manifest files for ICDs
    loader_get_manifest_files(inst, "VK_ICD_FILENAMES", false,
                              DEFAULT_VK_DRIVERS_INFO, &manifest_files);
    if (manifest_files.count == 0)
        return;
    // serialize all JSON parsing in the loader
    loader_platform_thread_lock_mutex(&loader_json_lock);
    for (uint32_t i = 0; i < manifest_files.count; i++) {
        file_str = manifest_files.filename_list[i];
        if (file_str == NULL)
            continue;

        cJSON *json;
        json = loader_get_json(inst, file_str);
        if (!json)
            continue;
        cJSON *item, *itemICD;
        item = cJSON_GetObjectItem(json, "file_format_version");
        if (item == NULL) {
            /* NOTE(review): this early return frees neither `json` nor the
             * remaining filename_list entries — looks like a leak; confirm. */
            loader_platform_thread_unlock_mutex(&loader_json_lock);
            return;
        }
        char *file_vers = cJSON_Print(item);
        loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
                   "Found manifest file %s, version %s", file_str, file_vers);
        // cJSON_Print returns the value with surrounding quotes
        if (strcmp(file_vers, "\"1.0.0\"") != 0)
            loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                       "Unexpected manifest file version (expected 1.0.0), may "
                       "cause errors");
        loader_tls_heap_free(file_vers);
        itemICD = cJSON_GetObjectItem(json, "ICD");
        if (itemICD != NULL) {
            item = cJSON_GetObjectItem(itemICD, "library_path");
            if (item != NULL) {
                char *temp = cJSON_Print(item);
                if (!temp || strlen(temp) == 0) {
                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                               "Can't find \"library_path\" in ICD JSON file "
                               "%s, skipping",
                               file_str);
                    loader_tls_heap_free(temp);
                    loader_heap_free(inst, file_str);
                    cJSON_Delete(json);
                    continue;
                }
                // strip out extra quotes
                temp[strlen(temp) - 1] = '\0';
                char *library_path = loader_stack_alloc(strlen(temp) + 1);
                strcpy(library_path, &temp[1]);
                loader_tls_heap_free(temp);
                if (!library_path || strlen(library_path) == 0) {
                    loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                               "Can't find \"library_path\" in ICD JSON file "
                               "%s, skipping",
                               file_str);
                    loader_heap_free(inst, file_str);
                    cJSON_Delete(json);
                    continue;
                }
                char fullpath[MAX_STRING_SIZE];
                // Print out the paths being searched if debugging is enabled
                loader_log(
                    inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
                    "Searching for ICD drivers named %s default dir %s\n",
                    library_path, DEFAULT_VK_DRIVERS_PATH);
                if (loader_platform_is_path(library_path)) {
                    // a relative or absolute path: resolve against the
                    // directory that holds the manifest file itself
                    char *name_copy = loader_stack_alloc(strlen(file_str) + 1);
                    char *rel_base;
                    strcpy(name_copy, file_str);
                    rel_base = loader_platform_dirname(name_copy);
                    loader_expand_path(library_path, rel_base, sizeof(fullpath),
                                       fullpath);
                } else {
                    // a filename which is assumed in a system directory
                    loader_get_fullpath(library_path, DEFAULT_VK_DRIVERS_PATH,
                                        sizeof(fullpath), fullpath);
                }

                uint32_t vers = 0;
                item = cJSON_GetObjectItem(itemICD, "api_version");
                if (item != NULL) {
                    temp = cJSON_Print(item);
                    vers = loader_make_version(temp);
                    loader_tls_heap_free(temp);
                }
                loader_scanned_icd_add(inst, icds, fullpath, vers);
            } else
                loader_log(inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                           "Can't find \"library_path\" object in ICD JSON "
                           "file %s, skipping",
                           file_str);
        } else
            loader_log(
                inst, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
                "Can't find \"ICD\" object in ICD JSON file %s, skipping",
                file_str);

        loader_heap_free(inst, file_str);
        cJSON_Delete(json);
    }
    loader_heap_free(inst, manifest_files.filename_list);
    loader_platform_thread_unlock_mutex(&loader_json_lock);
}

/* Discover all explicit and implicit layer manifests, replacing any previous
 * contents of instance_layers and device_layers, then append the standard-
 * validation meta layer when all of its component layers were found. */
void loader_layer_scan(const struct loader_instance *inst,
                       struct loader_layer_list *instance_layers,
                       struct loader_layer_list *device_layers) {
    char *file_str;
    struct loader_manifest_files
        manifest_files[2]; // [0] = explicit, [1] = implicit
    cJSON *json;
    uint32_t i;
    uint32_t implicit;

    // Get a list of manifest files for  explicit layers
    loader_get_manifest_files(inst, LAYERS_PATH_ENV, true,
                              DEFAULT_VK_ELAYERS_INFO, &manifest_files[0]);
    // Pass NULL for environment variable override - implicit layers are not
    // overridden by LAYERS_PATH_ENV
    loader_get_manifest_files(inst, NULL, true, DEFAULT_VK_ILAYERS_INFO,
                              &manifest_files[1]);
    if (manifest_files[0].count == 0 && manifest_files[1].count == 0)
        return;

#if 0 // TODO
    /**
     * We need a list of the layer libraries, not just a list of
     * the layer properties (a layer library could expose more than
     * one layer property). This list of scanned layers would be
     * used to check for global and physicaldevice layer properties.
     */
    if (!loader_init_layer_library_list(&loader.scanned_layer_libraries)) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   "Alloc for layer list failed: %s line: %d", __FILE__, __LINE__);
        return;
    }
#endif

    /* cleanup any previously scanned libraries */
    loader_delete_layer_properties(inst, instance_layers);
    loader_delete_layer_properties(inst, device_layers);

    loader_platform_thread_lock_mutex(&loader_json_lock);
    for (implicit = 0; implicit < 2; implicit++) {
        for (i = 0; i < manifest_files[implicit].count; i++) {
            file_str = manifest_files[implicit].filename_list[i];
            if (file_str == NULL)
                continue;

            // parse file into JSON struct
            json = loader_get_json(inst, file_str);
            if (!json) {
                continue;
            }

            // TODO error if device layers expose instance_extensions
            // TODO error if instance layers expose device extensions
            loader_add_layer_properties(inst, instance_layers, device_layers,
                                        json, (implicit == 1), file_str);

            loader_heap_free(inst, file_str);
            cJSON_Delete(json);
        }
    }
    if (manifest_files[0].count != 0)
        loader_heap_free(inst, manifest_files[0].filename_list);

    if (manifest_files[1].count != 0)
        loader_heap_free(inst, manifest_files[1].filename_list);

    // add a meta layer for validation if the validation layers are all present
    loader_add_layer_property_meta(
        inst, sizeof(std_validation_names) / sizeof(std_validation_names[0]),
        std_validation_names, instance_layers, device_layers);

    loader_platform_thread_unlock_mutex(&loader_json_lock);
}

/* Loader-internal GetInstanceProcAddr used as the terminator of the instance
 * chain: special-cases the three loader-owned entrypoints, then falls back to
 * the instance dispatch table. */
static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
loader_gpa_instance_internal(VkInstance inst, const char *pName) {
    if (!strcmp(pName, "vkGetInstanceProcAddr"))
        return (void *)loader_gpa_instance_internal;
    if (!strcmp(pName, "vkCreateInstance"))
        return (void *)loader_CreateInstance;
    if (!strcmp(pName, "vkCreateDevice"))
        return (void *)loader_create_device_terminator;

    // inst is not wrapped
    if (inst == VK_NULL_HANDLE) {
        return NULL;
    }
    VkLayerInstanceDispatchTable *disp_table =
        *(VkLayerInstanceDispatchTable **)inst;
    void *addr;

    if (disp_table == NULL)
        return NULL;

    addr = loader_lookup_instance_dispatch_table(disp_table, pName);
    if (addr) {
        return addr;
    }

    if (disp_table->GetInstanceProcAddr == NULL) {
        return NULL;
    }
    return disp_table->GetInstanceProcAddr(inst, pName);
}

/**
 * Initialize device_ext dispatch table entry as follows:
 * If dev == NULL find all logical devices created within this instance and
 * init the entry (given by idx) in the ext dispatch table.
 * If dev != NULL only initialize the entry in the given dev's dispatch table.
 * The initialization value is gotten by calling down the device chain with
 * GDPA.
 * If GDPA returns NULL then don't initialize the dispatch table entry.
+ */ +static void loader_init_dispatch_dev_ext_entry(struct loader_instance *inst, + struct loader_device *dev, + uint32_t idx, + const char *funcName) + +{ + void *gdpa_value; + if (dev != NULL) { + gdpa_value = dev->loader_dispatch.core_dispatch.GetDeviceProcAddr( + dev->device, funcName); + if (gdpa_value != NULL) + dev->loader_dispatch.ext_dispatch.DevExt[idx] = + (PFN_vkDevExt)gdpa_value; + } else { + for (uint32_t i = 0; i < inst->total_icd_count; i++) { + struct loader_icd *icd = &inst->icds[i]; + struct loader_device *dev = icd->logical_device_list; + while (dev) { + gdpa_value = + dev->loader_dispatch.core_dispatch.GetDeviceProcAddr( + dev->device, funcName); + if (gdpa_value != NULL) + dev->loader_dispatch.ext_dispatch.DevExt[idx] = + (PFN_vkDevExt)gdpa_value; + dev = dev->next; + } + } + } +} + +/** + * Find all dev extension in the hash table and initialize the dispatch table + * for dev for each of those extension entrypoints found in hash table. + + */ +static void loader_init_dispatch_dev_ext(struct loader_instance *inst, + struct loader_device *dev) { + for (uint32_t i = 0; i < MAX_NUM_DEV_EXTS; i++) { + if (inst->disp_hash[i].func_name != NULL) + loader_init_dispatch_dev_ext_entry(inst, dev, i, + inst->disp_hash[i].func_name); + } +} + +static bool loader_check_icds_for_address(struct loader_instance *inst, + const char *funcName) { + struct loader_icd *icd; + icd = inst->icds; + while (icd) { + if (icd->this_icd_lib->GetInstanceProcAddr(icd->instance, funcName)) + // this icd supports funcName + return true; + icd = icd->next; + } + + return false; +} + +static void loader_free_dev_ext_table(struct loader_instance *inst) { + for (uint32_t i = 0; i < MAX_NUM_DEV_EXTS; i++) { + loader_heap_free(inst, inst->disp_hash[i].func_name); + loader_heap_free(inst, inst->disp_hash[i].list.index); + } + memset(inst->disp_hash, 0, sizeof(inst->disp_hash)); +} + +static bool loader_add_dev_ext_table(struct loader_instance *inst, + uint32_t *ptr_idx, const char 
*funcName) { + uint32_t i; + uint32_t idx = *ptr_idx; + struct loader_dispatch_hash_list *list = &inst->disp_hash[idx].list; + + if (!inst->disp_hash[idx].func_name) { + // no entry here at this idx, so use it + assert(list->capacity == 0); + inst->disp_hash[idx].func_name = (char *)loader_heap_alloc( + inst, strlen(funcName) + 1, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (inst->disp_hash[idx].func_name == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "loader_add_dev_ext_table() can't allocate memory for " + "func_name"); + return false; + } + strncpy(inst->disp_hash[idx].func_name, funcName, strlen(funcName) + 1); + return true; + } + + // check for enough capacity + if (list->capacity == 0) { + list->index = loader_heap_alloc(inst, 8 * sizeof(*(list->index)), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (list->index == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "loader_add_dev_ext_table() can't allocate list memory"); + return false; + } + list->capacity = 8 * sizeof(*(list->index)); + } else if (list->capacity < (list->count + 1) * sizeof(*(list->index))) { + list->index = loader_heap_realloc(inst, list->index, list->capacity, + list->capacity * 2, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (list->index == NULL) { + loader_log( + inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "loader_add_dev_ext_table() can't reallocate list memory"); + return false; + } + list->capacity *= 2; + } + + // find an unused index in the hash table and use it + i = (idx + 1) % MAX_NUM_DEV_EXTS; + do { + if (!inst->disp_hash[i].func_name) { + assert(inst->disp_hash[i].list.capacity == 0); + inst->disp_hash[i].func_name = + (char *)loader_heap_alloc(inst, strlen(funcName) + 1, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (inst->disp_hash[i].func_name == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "loader_add_dev_ext_table() can't rallocate " + "func_name memory"); + return false; + } + strncpy(inst->disp_hash[i].func_name, funcName, 
+ strlen(funcName) + 1); + list->index[list->count] = i; + list->count++; + *ptr_idx = i; + return true; + } + i = (i + 1) % MAX_NUM_DEV_EXTS; + } while (i != idx); + + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "loader_add_dev_ext_table() couldn't insert into hash table; is " + "it full?"); + return false; +} + +static bool loader_name_in_dev_ext_table(struct loader_instance *inst, + uint32_t *idx, const char *funcName) { + uint32_t alt_idx; + if (inst->disp_hash[*idx].func_name && + !strcmp(inst->disp_hash[*idx].func_name, funcName)) + return true; + + // funcName wasn't at the primary spot in the hash table + // search the list of secondary locations (shallow search, not deep search) + for (uint32_t i = 0; i < inst->disp_hash[*idx].list.count; i++) { + alt_idx = inst->disp_hash[*idx].list.index[i]; + if (!strcmp(inst->disp_hash[*idx].func_name, funcName)) { + *idx = alt_idx; + return true; + } + } + + return false; +} + +/** + * This function returns generic trampoline code address for unknown entry + * points. + * Presumably, these unknown entry points (as given by funcName) are device + * extension entrypoints. A hash table is used to keep a list of unknown entry + * points and their mapping to the device extension dispatch table + * (struct loader_dev_ext_dispatch_table). + * \returns + * For a given entry point string (funcName), if an existing mapping is found + * the + * trampoline address for that mapping is returned. Otherwise, this unknown + * entry point + * has not been seen yet. Next check if a layer or ICD supports it. If so then + * a + * new entry in the hash table is initialized and that trampoline address for + * the new entry is returned. Null is returned if the hash table is full or + * if no discovered layer or ICD returns a non-NULL GetProcAddr for it. 
 */
void *loader_dev_ext_gpa(struct loader_instance *inst, const char *funcName) {
    uint32_t idx;
    uint32_t seed = 0;

    // hash the entrypoint name to its preferred table slot
    idx = murmurhash(funcName, strlen(funcName), seed) % MAX_NUM_DEV_EXTS;

    if (loader_name_in_dev_ext_table(inst, &idx, funcName))
        // found funcName already in hash
        return loader_get_dev_ext_trampoline(idx);

    // Check if funcName is supported in either ICDs or a layer library
    if (!loader_check_icds_for_address(inst, funcName)) {
        // TODO Add check in layer libraries for support of address
        // if support found in layers continue on
        return NULL;
    }

    if (loader_add_dev_ext_table(inst, &idx, funcName)) {
        // successfully added new table entry
        // init any dev dispatch table entrys as needed
        loader_init_dispatch_dev_ext_entry(inst, NULL, idx, funcName);
        return loader_get_dev_ext_trampoline(idx);
    }

    return NULL;
}

struct loader_instance *loader_get_instance(const VkInstance instance) {
    /* look up the loader_instance in our list by comparing dispatch tables, as
     * there is no guarantee the instance is still a loader_instance* after any
     * layers which wrap the instance object.
     */
    const VkLayerInstanceDispatchTable *disp;
    struct loader_instance *ptr_instance = NULL;
    disp = loader_get_instance_dispatch(instance);
    for (struct loader_instance *inst = loader.instances; inst;
         inst = inst->next) {
        if (inst->disp == disp) {
            ptr_instance = inst;
            break;
        }
    }
    return ptr_instance;
}

/* Load (or re-reference) the shared library backing layer_prop.  Libraries are
 * ref-counted in the global loader.loaded_layer_lib_list; returns the platform
 * library handle or NULL on failure. */
static loader_platform_dl_handle
loader_add_layer_lib(const struct loader_instance *inst, const char *chain_type,
                     struct loader_layer_properties *layer_prop) {
    struct loader_lib_info *new_layer_lib_list, *my_lib;
    size_t new_alloc_size;
    /*
     * TODO: We can now track this information in the
     * scanned_layer_libraries list.
     */
    for (uint32_t i = 0; i < loader.loaded_layer_lib_count; i++) {
        if (strcmp(loader.loaded_layer_lib_list[i].lib_name,
                   layer_prop->lib_name) == 0) {
            /* Have already loaded this library, just increment ref count */
            loader.loaded_layer_lib_list[i].ref_count++;
            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
                       "%s Chain: Increment layer reference count for layer "
                       "library %s",
                       chain_type, layer_prop->lib_name);
            return loader.loaded_layer_lib_list[i].lib_handle;
        }
    }

    /* Haven't seen this library so load it */
    // grow the global list if it is full (capacity is tracked in bytes)
    new_alloc_size = 0;
    if (loader.loaded_layer_lib_capacity == 0)
        new_alloc_size = 8 * sizeof(struct loader_lib_info);
    else if (loader.loaded_layer_lib_capacity <=
             loader.loaded_layer_lib_count * sizeof(struct loader_lib_info))
        new_alloc_size = loader.loaded_layer_lib_capacity * 2;

    if (new_alloc_size) {
        new_layer_lib_list = loader_heap_realloc(
            inst, loader.loaded_layer_lib_list,
            loader.loaded_layer_lib_capacity, new_alloc_size,
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
        if (!new_layer_lib_list) {
            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                       "loader: realloc failed in loader_add_layer_lib");
            return NULL;
        }
        loader.loaded_layer_lib_capacity = new_alloc_size;
        loader.loaded_layer_lib_list = new_layer_lib_list;
    } else
        new_layer_lib_list = loader.loaded_layer_lib_list;
    my_lib = &new_layer_lib_list[loader.loaded_layer_lib_count];

    strncpy(my_lib->lib_name, layer_prop->lib_name, sizeof(my_lib->lib_name));
    my_lib->lib_name[sizeof(my_lib->lib_name) - 1] = '\0';
    my_lib->ref_count = 0;
    my_lib->lib_handle = NULL;

    if ((my_lib->lib_handle = loader_platform_open_library(my_lib->lib_name)) ==
        NULL) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   loader_platform_open_library_error(my_lib->lib_name));
        return NULL;
    } else {
        loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
                   "Chain: %s: Loading layer library %s", chain_type,
                   layer_prop->lib_name);
    }
    loader.loaded_layer_lib_count++;
    my_lib->ref_count++;

    return my_lib->lib_handle;
}

/* Drop one reference on the library backing layer_prop; when the count hits
 * zero the library is closed and its record removed from the global list by
 * allocating a fresh list and copying the surviving records around idx. */
static void
loader_remove_layer_lib(struct loader_instance *inst,
                        struct loader_layer_properties *layer_prop) {
    uint32_t idx = loader.loaded_layer_lib_count;
    struct loader_lib_info *new_layer_lib_list, *my_lib = NULL;

    for (uint32_t i = 0; i < loader.loaded_layer_lib_count; i++) {
        if (strcmp(loader.loaded_layer_lib_list[i].lib_name,
                   layer_prop->lib_name) == 0) {
            /* found matching library */
            idx = i;
            my_lib = &loader.loaded_layer_lib_list[i];
            break;
        }
    }

    if (idx == loader.loaded_layer_lib_count) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   "Unable to unref library %s", layer_prop->lib_name);
        return;
    }

    if (my_lib) {
        my_lib->ref_count--;
        if (my_lib->ref_count > 0) {
            loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
                       "Decrement reference count for layer library %s",
                       layer_prop->lib_name);
            return;
        }
    }
    loader_platform_close_library(my_lib->lib_handle);
    loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
               "Unloading layer library %s", layer_prop->lib_name);

    /* Need to remove unused library from list */
    new_layer_lib_list =
        loader_heap_alloc(inst, loader.loaded_layer_lib_capacity,
                          VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    if (!new_layer_lib_list) {
        loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                   "loader: heap alloc failed loader_remove_layer_library");
        return;
    }

    if (idx > 0) {
        /* Copy records before idx */
        memcpy(new_layer_lib_list, &loader.loaded_layer_lib_list[0],
               sizeof(struct loader_lib_info) * idx);
    }
    if (idx < (loader.loaded_layer_lib_count - 1)) {
        /* Copy records after idx */
        memcpy(&new_layer_lib_list[idx], &loader.loaded_layer_lib_list[idx + 1],
               sizeof(struct loader_lib_info) *
                   (loader.loaded_layer_lib_count - idx - 1));
    }

    loader_heap_free(inst, loader.loaded_layer_lib_list);
    loader.loaded_layer_lib_count--;
    loader.loaded_layer_lib_list = new_layer_lib_list;
}

/**
 * Go through the search_list and find any layers which match type. If layer
 * type match is found in then add it to ext_list.
 */
static void
loader_add_layer_implicit(const struct loader_instance *inst,
                          const enum layer_type type,
                          struct loader_layer_list *list,
                          const struct loader_layer_list *search_list) {
    bool enable;
    char *env_value;
    uint32_t i;
    for (i = 0; i < search_list->count; i++) {
        const struct loader_layer_properties *prop = &search_list->list[i];
        if (prop->type & type) {
            /* Found an implicit layer, see if it should be enabled */
            enable = false;

            // if no enable_environment variable is specified, this implicit
            // layer
            // should always be enabled. Otherwise check if the variable is set
            if (prop->enable_env_var.name[0] == 0) {
                enable = true;
            } else {
                env_value = loader_getenv(prop->enable_env_var.name);
                if (env_value && !strcmp(prop->enable_env_var.value, env_value))
                    enable = true;
                loader_free_getenv(env_value);
            }

            // disable_environment has priority, i.e. if both enable and disable
            // environment variables are set, the layer is disabled. Implicit
            // layers
            // are required to have a disable_environment variables
            env_value = loader_getenv(prop->disable_env_var.name);
            if (env_value)
                enable = false;
            loader_free_getenv(env_value);

            if (enable)
                loader_add_to_layer_list(inst, list, 1, prop);
        }
    }
}

/**
 * Get the layer name(s) from the env_name environment variable. If layer
 * is found in search_list then add it to layer_list. But only add it to
 * layer_list if type matches.
+ */ +static void loader_add_layer_env(const struct loader_instance *inst, + const enum layer_type type, + const char *env_name, + struct loader_layer_list *layer_list, + const struct loader_layer_list *search_list) { + char *layerEnv; + char *next, *name; + + layerEnv = loader_getenv(env_name); + if (layerEnv == NULL) { + return; + } + name = loader_stack_alloc(strlen(layerEnv) + 1); + if (name == NULL) { + return; + } + strcpy(name, layerEnv); + + loader_free_getenv(layerEnv); + + while (name && *name) { + next = loader_get_next_path(name); + if (!strcmp(std_validation_str, name)) { + /* add meta list of layers + don't attempt to remove duplicate layers already added by app or + env var + */ + loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "Expanding meta layer %s found in environment variable", + std_validation_str); + for (uint32_t i = 0; i < sizeof(std_validation_names) / + sizeof(std_validation_names[0]); + i++) { + loader_find_layer_name_add_list(inst, std_validation_names[i], + type, search_list, layer_list); + } + } else { + loader_find_layer_name_add_list(inst, name, type, search_list, + layer_list); + } + name = next; + } + + return; +} + +void loader_deactivate_instance_layers(struct loader_instance *instance) { + /* Create instance chain of enabled layers */ + for (uint32_t i = 0; i < instance->activated_layer_list.count; i++) { + struct loader_layer_properties *layer_prop = + &instance->activated_layer_list.list[i]; + + loader_remove_layer_lib(instance, layer_prop); + } + loader_destroy_layer_list(instance, &instance->activated_layer_list); +} + +VkResult +loader_enable_instance_layers(struct loader_instance *inst, + const VkInstanceCreateInfo *pCreateInfo, + const struct loader_layer_list *instance_layers) { + VkResult err; + + assert(inst && "Cannot have null instance"); + + if (!loader_init_layer_list(inst, &inst->activated_layer_list)) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to alloc Instance activated layer 
list"); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + /* Add any implicit layers first */ + loader_add_layer_implicit(inst, VK_LAYER_TYPE_INSTANCE_IMPLICIT, + &inst->activated_layer_list, instance_layers); + + /* Add any layers specified via environment variable next */ + loader_add_layer_env(inst, VK_LAYER_TYPE_INSTANCE_EXPLICIT, + "VK_INSTANCE_LAYERS", &inst->activated_layer_list, + instance_layers); + + /* Add layers specified by the application */ + err = loader_add_layer_names_to_list( + inst, &inst->activated_layer_list, pCreateInfo->enabledLayerCount, + pCreateInfo->ppEnabledLayerNames, instance_layers); + + return err; +} + +/* + * Given the list of layers to activate in the loader_instance + * structure. This function will add a VkLayerInstanceCreateInfo + * structure to the VkInstanceCreateInfo.pNext pointer. + * Each activated layer will have it's own VkLayerInstanceLink + * structure that tells the layer what Get*ProcAddr to call to + * get function pointers to the next layer down. + * Once the chain info has been created this function will + * execute the CreateInstance call chain. Each layer will + * then have an opportunity in it's CreateInstance function + * to setup it's dispatch table when the lower layer returns + * successfully. + * Each layer can wrap or not-wrap the returned VkInstance object + * as it sees fit. + * The instance chain is terminated by a loader function + * that will call CreateInstance on all available ICD's and + * cache those VkInstance objects for future use. 
+ */ +VkResult loader_create_instance_chain(const VkInstanceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + struct loader_instance *inst, + VkInstance *created_instance) { + uint32_t activated_layers = 0; + VkLayerInstanceCreateInfo chain_info; + VkLayerInstanceLink *layer_instance_link_info = NULL; + VkInstanceCreateInfo loader_create_info; + VkResult res; + + PFN_vkGetInstanceProcAddr nextGIPA = loader_gpa_instance_internal; + PFN_vkGetInstanceProcAddr fpGIPA = loader_gpa_instance_internal; + + memcpy(&loader_create_info, pCreateInfo, sizeof(VkInstanceCreateInfo)); + + if (inst->activated_layer_list.count > 0) { + + chain_info.u.pLayerInfo = NULL; + chain_info.pNext = pCreateInfo->pNext; + chain_info.sType = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO; + chain_info.function = VK_LAYER_LINK_INFO; + loader_create_info.pNext = &chain_info; + + layer_instance_link_info = loader_stack_alloc( + sizeof(VkLayerInstanceLink) * inst->activated_layer_list.count); + if (!layer_instance_link_info) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to alloc Instance objects for layer"); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + /* Create instance chain of enabled layers */ + for (int32_t i = inst->activated_layer_list.count - 1; i >= 0; i--) { + struct loader_layer_properties *layer_prop = + &inst->activated_layer_list.list[i]; + loader_platform_dl_handle lib_handle; + + lib_handle = loader_add_layer_lib(inst, "instance", layer_prop); + if (!lib_handle) + continue; + if ((fpGIPA = layer_prop->functions.get_instance_proc_addr) == + NULL) { + if (layer_prop->functions.str_gipa == NULL || + strlen(layer_prop->functions.str_gipa) == 0) { + fpGIPA = (PFN_vkGetInstanceProcAddr) + loader_platform_get_proc_address( + lib_handle, "vkGetInstanceProcAddr"); + layer_prop->functions.get_instance_proc_addr = fpGIPA; + } else + fpGIPA = (PFN_vkGetInstanceProcAddr) + loader_platform_get_proc_address( + lib_handle, layer_prop->functions.str_gipa); + if 
(!fpGIPA) { + loader_log( + inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to find vkGetInstanceProcAddr in layer %s", + layer_prop->lib_name); + continue; + } + } + + layer_instance_link_info[activated_layers].pNext = + chain_info.u.pLayerInfo; + layer_instance_link_info[activated_layers] + .pfnNextGetInstanceProcAddr = nextGIPA; + chain_info.u.pLayerInfo = + &layer_instance_link_info[activated_layers]; + nextGIPA = fpGIPA; + + loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "Insert instance layer %s (%s)", + layer_prop->info.layerName, layer_prop->lib_name); + + activated_layers++; + } + } + + PFN_vkCreateInstance fpCreateInstance = + (PFN_vkCreateInstance)nextGIPA(*created_instance, "vkCreateInstance"); + if (fpCreateInstance) { + VkLayerInstanceCreateInfo instance_create_info; + + instance_create_info.sType = + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO; + instance_create_info.function = VK_LAYER_INSTANCE_INFO; + + instance_create_info.u.instanceInfo.instance_info = inst; + instance_create_info.u.instanceInfo.pfnNextGetInstanceProcAddr = + nextGIPA; + + instance_create_info.pNext = loader_create_info.pNext; + loader_create_info.pNext = &instance_create_info; + + res = + fpCreateInstance(&loader_create_info, pAllocator, created_instance); + } else { + // Couldn't find CreateInstance function! 
+ res = VK_ERROR_INITIALIZATION_FAILED; + } + + if (res != VK_SUCCESS) { + // TODO: Need to clean up here + } else { + loader_init_instance_core_dispatch_table(inst->disp, nextGIPA, + *created_instance); + } + + return res; +} + +void loader_activate_instance_layer_extensions(struct loader_instance *inst, + VkInstance created_inst) { + + loader_init_instance_extension_dispatch_table( + inst->disp, inst->disp->GetInstanceProcAddr, created_inst); +} + +static VkResult +loader_enable_device_layers(const struct loader_instance *inst, + struct loader_icd *icd, + struct loader_layer_list *activated_layer_list, + const VkDeviceCreateInfo *pCreateInfo, + const struct loader_layer_list *device_layers) + +{ + VkResult err; + + assert(activated_layer_list && "Cannot have null output layer list"); + + if (activated_layer_list->list == NULL || + activated_layer_list->capacity == 0) { + loader_init_layer_list(inst, activated_layer_list); + } + + if (activated_layer_list->list == NULL) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to alloc device activated layer list"); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + /* Add any implicit layers first */ + loader_add_layer_implicit(inst, VK_LAYER_TYPE_DEVICE_IMPLICIT, + activated_layer_list, device_layers); + + /* Add any layers specified via environment variable next */ + loader_add_layer_env(inst, VK_LAYER_TYPE_DEVICE_EXPLICIT, + "VK_DEVICE_LAYERS", activated_layer_list, + device_layers); + + /* Add layers specified by the application */ + err = loader_add_layer_names_to_list( + inst, activated_layer_list, pCreateInfo->enabledLayerCount, + pCreateInfo->ppEnabledLayerNames, device_layers); + + return err; +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_create_device_terminator(VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDevice *pDevice) { + struct loader_physical_device *phys_dev; + phys_dev = loader_get_physical_device(physicalDevice); + + 
VkLayerDeviceCreateInfo *chain_info = + (VkLayerDeviceCreateInfo *)pCreateInfo->pNext; + while (chain_info && + !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO && + chain_info->function == VK_LAYER_DEVICE_INFO)) { + chain_info = (VkLayerDeviceCreateInfo *)chain_info->pNext; + } + assert(chain_info != NULL); + + struct loader_device *dev = + (struct loader_device *)chain_info->u.deviceInfo.device_info; + PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = + chain_info->u.deviceInfo.pfnNextGetInstanceProcAddr; + PFN_vkCreateDevice fpCreateDevice = + (PFN_vkCreateDevice)fpGetInstanceProcAddr(phys_dev->this_icd->instance, + "vkCreateDevice"); + if (fpCreateDevice == NULL) { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VkDeviceCreateInfo localCreateInfo; + memcpy(&localCreateInfo, pCreateInfo, sizeof(localCreateInfo)); + localCreateInfo.pNext = loader_strip_create_extensions(pCreateInfo->pNext); + + /* + * NOTE: Need to filter the extensions to only those + * supported by the ICD. + * No ICD will advertise support for layers. An ICD + * library could support a layer, but it would be + * independent of the actual ICD, just in the same library. 
+ */ + char **filtered_extension_names = NULL; + filtered_extension_names = + loader_stack_alloc(pCreateInfo->enabledExtensionCount * sizeof(char *)); + if (!filtered_extension_names) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + localCreateInfo.enabledLayerCount = 0; + localCreateInfo.ppEnabledLayerNames = NULL; + + localCreateInfo.enabledExtensionCount = 0; + localCreateInfo.ppEnabledExtensionNames = + (const char *const *)filtered_extension_names; + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + const char *extension_name = pCreateInfo->ppEnabledExtensionNames[i]; + VkExtensionProperties *prop = get_extension_property( + extension_name, &phys_dev->device_extension_cache); + if (prop) { + filtered_extension_names[localCreateInfo.enabledExtensionCount] = + (char *)extension_name; + localCreateInfo.enabledExtensionCount++; + } + } + + VkDevice localDevice; + // TODO: Why does fpCreateDevice behave differently than + // this_icd->CreateDevice? + // VkResult res = fpCreateDevice(phys_dev->phys_dev, &localCreateInfo, + // pAllocator, &localDevice); + VkResult res = phys_dev->this_icd->CreateDevice( + phys_dev->phys_dev, &localCreateInfo, pAllocator, &localDevice); + + if (res != VK_SUCCESS) { + return res; + } + + *pDevice = localDevice; + + /* Init dispatch pointer in new device object */ + loader_init_dispatch(*pDevice, &dev->loader_dispatch); + + return res; +} + +VkResult loader_create_device_chain(VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + struct loader_instance *inst, + struct loader_icd *icd, + struct loader_device *dev) { + uint32_t activated_layers = 0; + VkLayerDeviceLink *layer_device_link_info; + VkLayerDeviceCreateInfo chain_info; + VkLayerDeviceCreateInfo device_info; + VkDeviceCreateInfo loader_create_info; + VkResult res; + + PFN_vkGetDeviceProcAddr fpGDPA, nextGDPA = icd->GetDeviceProcAddr; + PFN_vkGetInstanceProcAddr fpGIPA, nextGIPA = 
loader_gpa_instance_internal; + + memcpy(&loader_create_info, pCreateInfo, sizeof(VkDeviceCreateInfo)); + + chain_info.sType = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO; + chain_info.function = VK_LAYER_LINK_INFO; + chain_info.u.pLayerInfo = NULL; + chain_info.pNext = pCreateInfo->pNext; + + layer_device_link_info = loader_stack_alloc( + sizeof(VkLayerDeviceLink) * dev->activated_layer_list.count); + if (!layer_device_link_info) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to alloc Device objects for layer"); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + /* + * This structure is used by loader_create_device_terminator + * so that it can intialize the device dispatch table pointer + * in the device object returned by the ICD. Without this + * structure the code wouldn't know where the loader's device_info + * structure is located. + */ + device_info.sType = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO; + device_info.function = VK_LAYER_DEVICE_INFO; + device_info.pNext = &chain_info; + device_info.u.deviceInfo.device_info = dev; + device_info.u.deviceInfo.pfnNextGetInstanceProcAddr = + icd->this_icd_lib->GetInstanceProcAddr; + + loader_create_info.pNext = &device_info; + + if (dev->activated_layer_list.count > 0) { + /* Create instance chain of enabled layers */ + for (int32_t i = dev->activated_layer_list.count - 1; i >= 0; i--) { + struct loader_layer_properties *layer_prop = + &dev->activated_layer_list.list[i]; + loader_platform_dl_handle lib_handle; + + lib_handle = loader_add_layer_lib(inst, "device", layer_prop); + if (!lib_handle) + continue; + if ((fpGIPA = layer_prop->functions.get_instance_proc_addr) == + NULL) { + if (layer_prop->functions.str_gipa == NULL || + strlen(layer_prop->functions.str_gipa) == 0) { + fpGIPA = (PFN_vkGetInstanceProcAddr) + loader_platform_get_proc_address( + lib_handle, "vkGetInstanceProcAddr"); + layer_prop->functions.get_instance_proc_addr = fpGIPA; + } else + fpGIPA = (PFN_vkGetInstanceProcAddr) + 
loader_platform_get_proc_address( + lib_handle, layer_prop->functions.str_gipa); + if (!fpGIPA) { + loader_log( + inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to find vkGetInstanceProcAddr in layer %s", + layer_prop->lib_name); + continue; + } + } + if ((fpGDPA = layer_prop->functions.get_device_proc_addr) == NULL) { + if (layer_prop->functions.str_gdpa == NULL || + strlen(layer_prop->functions.str_gdpa) == 0) { + fpGDPA = (PFN_vkGetDeviceProcAddr) + loader_platform_get_proc_address(lib_handle, + "vkGetDeviceProcAddr"); + layer_prop->functions.get_device_proc_addr = fpGDPA; + } else + fpGDPA = (PFN_vkGetDeviceProcAddr) + loader_platform_get_proc_address( + lib_handle, layer_prop->functions.str_gdpa); + if (!fpGDPA) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Failed to find vkGetDeviceProcAddr in layer %s", + layer_prop->lib_name); + continue; + } + } + + layer_device_link_info[activated_layers].pNext = + chain_info.u.pLayerInfo; + layer_device_link_info[activated_layers] + .pfnNextGetInstanceProcAddr = nextGIPA; + layer_device_link_info[activated_layers].pfnNextGetDeviceProcAddr = + nextGDPA; + chain_info.u.pLayerInfo = &layer_device_link_info[activated_layers]; + nextGIPA = fpGIPA; + nextGDPA = fpGDPA; + + loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0, + "Insert device layer %s (%s)", + layer_prop->info.layerName, layer_prop->lib_name); + + activated_layers++; + } + } + + PFN_vkCreateDevice fpCreateDevice = + (PFN_vkCreateDevice)nextGIPA((VkInstance)inst, "vkCreateDevice"); + if (fpCreateDevice) { + res = fpCreateDevice(physicalDevice, &loader_create_info, pAllocator, + &dev->device); + } else { + // Couldn't find CreateDevice function! 
+ return VK_ERROR_INITIALIZATION_FAILED; + } + + /* Initialize device dispatch table */ + loader_init_device_dispatch_table(&dev->loader_dispatch, nextGDPA, + dev->device); + + return res; +} + +VkResult loader_validate_layers(const struct loader_instance *inst, + const uint32_t layer_count, + const char *const *ppEnabledLayerNames, + const struct loader_layer_list *list) { + struct loader_layer_properties *prop; + + for (uint32_t i = 0; i < layer_count; i++) { + VkStringErrorFlags result = + vk_string_validate(MaxLoaderStringLength, ppEnabledLayerNames[i]); + if (result != VK_STRING_ERROR_NONE) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Loader: Device ppEnabledLayerNames contains string " + "that is too long or is badly formed"); + return VK_ERROR_LAYER_NOT_PRESENT; + } + + prop = loader_get_layer_property(ppEnabledLayerNames[i], list); + if (!prop) { + return VK_ERROR_LAYER_NOT_PRESENT; + } + } + return VK_SUCCESS; +} + +VkResult loader_validate_instance_extensions( + const struct loader_instance *inst, + const struct loader_extension_list *icd_exts, + const struct loader_layer_list *instance_layer, + const VkInstanceCreateInfo *pCreateInfo) { + + VkExtensionProperties *extension_prop; + struct loader_layer_properties *layer_prop; + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + VkStringErrorFlags result = vk_string_validate( + MaxLoaderStringLength, pCreateInfo->ppEnabledExtensionNames[i]); + if (result != VK_STRING_ERROR_NONE) { + loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Loader: Instance ppEnabledExtensionNames contains " + "string that is too long or is badly formed"); + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + + extension_prop = get_extension_property( + pCreateInfo->ppEnabledExtensionNames[i], icd_exts); + + if (extension_prop) { + continue; + } + + extension_prop = NULL; + + /* Not in global list, search layer extension lists */ + for (uint32_t j = 0; j < pCreateInfo->enabledLayerCount; j++) { + 
layer_prop = loader_get_layer_property( + pCreateInfo->ppEnabledLayerNames[i], instance_layer); + if (!layer_prop) { + /* Should NOT get here, loader_validate_layers + * should have already filtered this case out. + */ + continue; + } + + extension_prop = + get_extension_property(pCreateInfo->ppEnabledExtensionNames[i], + &layer_prop->instance_extension_list); + if (extension_prop) { + /* Found the extension in one of the layers enabled by the app. + */ + break; + } + } + + if (!extension_prop) { + /* Didn't find extension name in any of the global layers, error out + */ + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + return VK_SUCCESS; +} + +VkResult loader_validate_device_extensions( + struct loader_physical_device *phys_dev, + const struct loader_layer_list *activated_device_layers, + const VkDeviceCreateInfo *pCreateInfo) { + VkExtensionProperties *extension_prop; + struct loader_layer_properties *layer_prop; + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + + VkStringErrorFlags result = vk_string_validate( + MaxLoaderStringLength, pCreateInfo->ppEnabledExtensionNames[i]); + if (result != VK_STRING_ERROR_NONE) { + loader_log(phys_dev->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, + 0, "Loader: Device ppEnabledExtensionNames contains " + "string that is too long or is badly formed"); + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + + const char *extension_name = pCreateInfo->ppEnabledExtensionNames[i]; + extension_prop = get_extension_property( + extension_name, &phys_dev->device_extension_cache); + + if (extension_prop) { + continue; + } + + /* Not in global list, search activated layer extension lists */ + for (uint32_t j = 0; j < activated_device_layers->count; j++) { + layer_prop = &activated_device_layers->list[j]; + + extension_prop = get_dev_extension_property( + extension_name, &layer_prop->device_extension_list); + if (extension_prop) { + /* Found the extension in one of the layers enabled by the app. 
+ */ + break; + } + } + + if (!extension_prop) { + /* Didn't find extension name in any of the device layers, error out + */ + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + return VK_SUCCESS; +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateInstance(const VkInstanceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkInstance *pInstance) { + struct loader_icd *icd; + VkExtensionProperties *prop; + char **filtered_extension_names = NULL; + VkInstanceCreateInfo icd_create_info; + VkResult res = VK_SUCCESS; + bool success = false; + + VkLayerInstanceCreateInfo *chain_info = + (VkLayerInstanceCreateInfo *)pCreateInfo->pNext; + while ( + chain_info && + !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO && + chain_info->function == VK_LAYER_INSTANCE_INFO)) { + chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext; + } + assert(chain_info != NULL); + + struct loader_instance *ptr_instance = + (struct loader_instance *)chain_info->u.instanceInfo.instance_info; + memcpy(&icd_create_info, pCreateInfo, sizeof(icd_create_info)); + + icd_create_info.enabledLayerCount = 0; + icd_create_info.ppEnabledLayerNames = NULL; + + // strip off the VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO entries + icd_create_info.pNext = loader_strip_create_extensions(pCreateInfo->pNext); + + /* + * NOTE: Need to filter the extensions to only those + * supported by the ICD. + * No ICD will advertise support for layers. An ICD + * library could support a layer, but it would be + * independent of the actual ICD, just in the same library. 
+ */ + filtered_extension_names = + loader_stack_alloc(pCreateInfo->enabledExtensionCount * sizeof(char *)); + if (!filtered_extension_names) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + icd_create_info.ppEnabledExtensionNames = + (const char *const *)filtered_extension_names; + + for (uint32_t i = 0; i < ptr_instance->icd_libs.count; i++) { + icd = loader_icd_add(ptr_instance, &ptr_instance->icd_libs.list[i]); + if (icd) { + icd_create_info.enabledExtensionCount = 0; + struct loader_extension_list icd_exts; + + loader_log(ptr_instance, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0, + "Build ICD instance extension list"); + // traverse scanned icd list adding non-duplicate extensions to the + // list + loader_init_generic_list(ptr_instance, + (struct loader_generic_list *)&icd_exts, + sizeof(VkExtensionProperties)); + loader_add_instance_extensions( + ptr_instance, + icd->this_icd_lib->EnumerateInstanceExtensionProperties, + icd->this_icd_lib->lib_name, &icd_exts); + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + prop = get_extension_property( + pCreateInfo->ppEnabledExtensionNames[i], &icd_exts); + if (prop) { + filtered_extension_names[icd_create_info + .enabledExtensionCount] = + (char *)pCreateInfo->ppEnabledExtensionNames[i]; + icd_create_info.enabledExtensionCount++; + } + } + + loader_destroy_generic_list( + ptr_instance, (struct loader_generic_list *)&icd_exts); + + res = ptr_instance->icd_libs.list[i].CreateInstance( + &icd_create_info, pAllocator, &(icd->instance)); + if (res == VK_SUCCESS) + success = loader_icd_init_entrys( + icd, icd->instance, + ptr_instance->icd_libs.list[i].GetInstanceProcAddr); + + if (res != VK_SUCCESS || !success) { + ptr_instance->icds = ptr_instance->icds->next; + loader_icd_destroy(ptr_instance, icd); + loader_log(ptr_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "ICD ignored: failed to CreateInstance and find " + "entrypoints with ICD"); + } + } + } + + /* + * If no ICDs were added to instance list and res is 
unchanged + * from it's initial value, the loader was unable to find + * a suitable ICD. + */ + if (ptr_instance->icds == NULL) { + if (res == VK_SUCCESS) { + return VK_ERROR_INCOMPATIBLE_DRIVER; + } else { + return res; + } + } + + return VK_SUCCESS; +} + +VKAPI_ATTR void VKAPI_CALL +loader_DestroyInstance(VkInstance instance, + const VkAllocationCallbacks *pAllocator) { + struct loader_instance *ptr_instance = loader_instance(instance); + struct loader_icd *icds = ptr_instance->icds; + struct loader_icd *next_icd; + + // Remove this instance from the list of instances: + struct loader_instance *prev = NULL; + struct loader_instance *next = loader.instances; + while (next != NULL) { + if (next == ptr_instance) { + // Remove this instance from the list: + if (prev) + prev->next = next->next; + else + loader.instances = next->next; + break; + } + prev = next; + next = next->next; + } + + while (icds) { + if (icds->instance) { + icds->DestroyInstance(icds->instance, pAllocator); + } + next_icd = icds->next; + icds->instance = VK_NULL_HANDLE; + loader_icd_destroy(ptr_instance, icds); + + icds = next_icd; + } + loader_delete_layer_properties(ptr_instance, + &ptr_instance->device_layer_list); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->instance_layer_list); + loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_libs); + loader_destroy_generic_list( + ptr_instance, (struct loader_generic_list *)&ptr_instance->ext_list); + for (uint32_t i = 0; i < ptr_instance->total_gpu_count; i++) + loader_destroy_generic_list( + ptr_instance, + (struct loader_generic_list *)&ptr_instance->phys_devs[i] + .device_extension_cache); + loader_heap_free(ptr_instance, ptr_instance->phys_devs); + loader_free_dev_ext_table(ptr_instance); +} + +VkResult +loader_init_physical_device_info(struct loader_instance *ptr_instance) { + struct loader_icd *icd; + uint32_t i, j, idx, count = 0; + VkResult res; + struct loader_phys_dev_per_icd *phys_devs; + + 
ptr_instance->total_gpu_count = 0; + phys_devs = (struct loader_phys_dev_per_icd *)loader_stack_alloc( + sizeof(struct loader_phys_dev_per_icd) * ptr_instance->total_icd_count); + if (!phys_devs) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + icd = ptr_instance->icds; + for (i = 0; i < ptr_instance->total_icd_count; i++) { + assert(icd); + res = icd->EnumeratePhysicalDevices(icd->instance, &phys_devs[i].count, + NULL); + if (res != VK_SUCCESS) + return res; + count += phys_devs[i].count; + icd = icd->next; + } + + ptr_instance->phys_devs = + (struct loader_physical_device *)loader_heap_alloc( + ptr_instance, count * sizeof(struct loader_physical_device), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (!ptr_instance->phys_devs) + return VK_ERROR_OUT_OF_HOST_MEMORY; + + icd = ptr_instance->icds; + + struct loader_physical_device *inst_phys_devs = ptr_instance->phys_devs; + idx = 0; + for (i = 0; i < ptr_instance->total_icd_count; i++) { + assert(icd); + + phys_devs[i].phys_devs = (VkPhysicalDevice *)loader_stack_alloc( + phys_devs[i].count * sizeof(VkPhysicalDevice)); + if (!phys_devs[i].phys_devs) { + loader_heap_free(ptr_instance, ptr_instance->phys_devs); + ptr_instance->phys_devs = NULL; + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + res = icd->EnumeratePhysicalDevices( + icd->instance, &(phys_devs[i].count), phys_devs[i].phys_devs); + if ((res == VK_SUCCESS)) { + ptr_instance->total_gpu_count += phys_devs[i].count; + for (j = 0; j < phys_devs[i].count; j++) { + + // initialize the loader's physicalDevice object + loader_set_dispatch((void *)&inst_phys_devs[idx], + ptr_instance->disp); + inst_phys_devs[idx].this_instance = ptr_instance; + inst_phys_devs[idx].this_icd = icd; + inst_phys_devs[idx].phys_dev = phys_devs[i].phys_devs[j]; + memset(&inst_phys_devs[idx].device_extension_cache, 0, + sizeof(struct loader_extension_list)); + + idx++; + } + } else { + loader_heap_free(ptr_instance, ptr_instance->phys_devs); + ptr_instance->phys_devs = NULL; + return res; + } + + icd = 
icd->next; + } + + return VK_SUCCESS; +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumeratePhysicalDevices(VkInstance instance, + uint32_t *pPhysicalDeviceCount, + VkPhysicalDevice *pPhysicalDevices) { + uint32_t i; + uint32_t copy_count = 0; + struct loader_instance *ptr_instance = (struct loader_instance *)instance; + VkResult res = VK_SUCCESS; + + if (ptr_instance->total_gpu_count == 0) { + res = loader_init_physical_device_info(ptr_instance); + } + + *pPhysicalDeviceCount = ptr_instance->total_gpu_count; + if (!pPhysicalDevices) { + return res; + } + + copy_count = (ptr_instance->total_gpu_count < *pPhysicalDeviceCount) + ? ptr_instance->total_gpu_count + : *pPhysicalDeviceCount; + for (i = 0; i < copy_count; i++) { + pPhysicalDevices[i] = (VkPhysicalDevice)&ptr_instance->phys_devs[i]; + } + *pPhysicalDeviceCount = copy_count; + + if (copy_count < ptr_instance->total_gpu_count) { + return VK_INCOMPLETE; + } + + return res; +} + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties *pProperties) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceProperties) + icd->GetPhysicalDeviceProperties(phys_dev->phys_dev, pProperties); +} + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, + VkQueueFamilyProperties *pProperties) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceQueueFamilyProperties) + icd->GetPhysicalDeviceQueueFamilyProperties( + phys_dev->phys_dev, pQueueFamilyPropertyCount, pProperties); +} + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties *pProperties) 
{ + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceMemoryProperties) + icd->GetPhysicalDeviceMemoryProperties(phys_dev->phys_dev, pProperties); +} + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures *pFeatures) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceFeatures) + icd->GetPhysicalDeviceFeatures(phys_dev->phys_dev, pFeatures); +} + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties *pFormatInfo) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceFormatProperties) + icd->GetPhysicalDeviceFormatProperties(phys_dev->phys_dev, format, + pFormatInfo); +} + +VKAPI_ATTR VkResult VKAPI_CALL loader_GetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, + VkImageFormatProperties *pImageFormatProperties) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (!icd->GetPhysicalDeviceImageFormatProperties) + return VK_ERROR_INITIALIZATION_FAILED; + + return icd->GetPhysicalDeviceImageFormatProperties( + phys_dev->phys_dev, format, type, tiling, usage, flags, + pImageFormatProperties); +} + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + VkSampleCountFlagBits samples, VkImageUsageFlags usage, + VkImageTiling tiling, 
uint32_t *pNumProperties, + VkSparseImageFormatProperties *pProperties) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + if (icd->GetPhysicalDeviceSparseImageFormatProperties) + icd->GetPhysicalDeviceSparseImageFormatProperties( + phys_dev->phys_dev, format, type, samples, usage, tiling, + pNumProperties, pProperties); +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateDevice(VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDevice *pDevice) { + struct loader_physical_device *phys_dev; + struct loader_icd *icd; + struct loader_device *dev; + struct loader_instance *inst; + struct loader_layer_list activated_layer_list = {0}; + VkResult res; + + assert(pCreateInfo->queueCreateInfoCount >= 1); + + // TODO this only works for one physical device per instance + // once CreateDevice layer bootstrapping is done via DeviceCreateInfo + // hopefully don't need this anymore in trampoline code + phys_dev = loader_get_physical_device(physicalDevice); + icd = phys_dev->this_icd; + if (!icd) + return VK_ERROR_INITIALIZATION_FAILED; + + inst = phys_dev->this_instance; + + if (!icd->CreateDevice) { + return VK_ERROR_INITIALIZATION_FAILED; + } + + /* validate any app enabled layers are available */ + if (pCreateInfo->enabledLayerCount > 0) { + res = loader_validate_layers(inst, pCreateInfo->enabledLayerCount, + pCreateInfo->ppEnabledLayerNames, + &inst->device_layer_list); + if (res != VK_SUCCESS) { + return res; + } + } + + /* Get the physical device extensions if they haven't been retrieved yet */ + if (phys_dev->device_extension_cache.capacity == 0) { + if (!loader_init_generic_list( + inst, + (struct loader_generic_list *)&phys_dev->device_extension_cache, + sizeof(VkExtensionProperties))) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + res = loader_add_device_extensions( + inst, icd, phys_dev->phys_dev, + 
phys_dev->this_icd->this_icd_lib->lib_name, + &phys_dev->device_extension_cache); + if (res != VK_SUCCESS) { + return res; + } + } + + /* convert any meta layers to the actual layers makes a copy of layer name*/ + uint32_t saved_layer_count = pCreateInfo->enabledLayerCount; + char **saved_layer_names; + char **saved_layer_ptr; + saved_layer_names = + loader_stack_alloc(sizeof(char *) * pCreateInfo->enabledLayerCount); + for (uint32_t i = 0; i < saved_layer_count; i++) { + saved_layer_names[i] = (char *)pCreateInfo->ppEnabledLayerNames[i]; + } + saved_layer_ptr = (char **)pCreateInfo->ppEnabledLayerNames; + + loader_expand_layer_names( + inst, std_validation_str, + sizeof(std_validation_names) / sizeof(std_validation_names[0]), + std_validation_names, (uint32_t *)&pCreateInfo->enabledLayerCount, + (char ***)&pCreateInfo->ppEnabledLayerNames); + + /* fetch a list of all layers activated, explicit and implicit */ + res = loader_enable_device_layers(inst, icd, &activated_layer_list, + pCreateInfo, &inst->device_layer_list); + if (res != VK_SUCCESS) { + loader_unexpand_dev_layer_names(inst, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + return res; + } + + /* make sure requested extensions to be enabled are supported */ + res = loader_validate_device_extensions(phys_dev, &activated_layer_list, + pCreateInfo); + if (res != VK_SUCCESS) { + loader_unexpand_dev_layer_names(inst, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_destroy_generic_list( + inst, (struct loader_generic_list *)&activated_layer_list); + return res; + } + + dev = loader_add_logical_device(inst, &icd->logical_device_list); + if (dev == NULL) { + loader_unexpand_dev_layer_names(inst, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_destroy_generic_list( + inst, (struct loader_generic_list *)&activated_layer_list); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + /* move the locally filled layer list into the 
device, and pass ownership of + * the memory */ + dev->activated_layer_list.capacity = activated_layer_list.capacity; + dev->activated_layer_list.count = activated_layer_list.count; + dev->activated_layer_list.list = activated_layer_list.list; + memset(&activated_layer_list, 0, sizeof(activated_layer_list)); + + /* activate any layers on device chain which terminates with device*/ + res = loader_enable_device_layers(inst, icd, &dev->activated_layer_list, + pCreateInfo, &inst->device_layer_list); + if (res != VK_SUCCESS) { + loader_unexpand_dev_layer_names(inst, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_remove_logical_device(inst, icd, dev); + return res; + } + + res = loader_create_device_chain(physicalDevice, pCreateInfo, pAllocator, + inst, icd, dev); + if (res != VK_SUCCESS) { + loader_unexpand_dev_layer_names(inst, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_remove_logical_device(inst, icd, dev); + return res; + } + + *pDevice = dev->device; + + /* initialize any device extension dispatch entry's from the instance list*/ + loader_init_dispatch_dev_ext(inst, dev); + + /* initialize WSI device extensions as part of core dispatch since loader + * has + * dedicated trampoline code for these*/ + loader_init_device_extension_dispatch_table( + &dev->loader_dispatch, + dev->loader_dispatch.core_dispatch.GetDeviceProcAddr, *pDevice); + + loader_unexpand_dev_layer_names(inst, saved_layer_count, saved_layer_names, + saved_layer_ptr, pCreateInfo); + return res; +} + +/** + * Get an instance level or global level entry point address. + * @param instance + * @param pName + * @return + * If instance == NULL returns a global level functions only + * If instance is valid returns a trampoline entry point for all dispatchable + * Vulkan + * functions both core and extensions. 
+ */ +LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL +vkGetInstanceProcAddr(VkInstance instance, const char *pName) { + + void *addr; + + addr = globalGetProcAddr(pName); + if (instance == VK_NULL_HANDLE) { + // get entrypoint addresses that are global (no dispatchable object) + + return addr; + } else { + // if a global entrypoint return NULL + if (addr) + return NULL; + } + + struct loader_instance *ptr_instance = loader_get_instance(instance); + if (ptr_instance == NULL) + return NULL; + // Return trampoline code for non-global entrypoints including any + // extensions. + // Device extensions are returned if a layer or ICD supports the extension. + // Instance extensions are returned if the extension is enabled and the + // loader + // or someone else supports the extension + return trampolineGetProcAddr(ptr_instance, pName); +} + +/** + * Get a device level or global level entry point address. + * @param device + * @param pName + * @return + * If device is valid, returns a device relative entry point for device level + * entry points both core and extensions. + * Device relative means call down the device chain. + */ +LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL +vkGetDeviceProcAddr(VkDevice device, const char *pName) { + void *addr; + + /* for entrypoints that loader must handle (ie non-dispatchable or create + object) + make sure the loader entrypoint is returned */ + addr = loader_non_passthrough_gdpa(pName); + if (addr) { + return addr; + } + + /* Although CreateDevice is on device chain it's dispatchable object isn't + * a VkDevice or child of VkDevice so return NULL. 
+ */ + if (!strcmp(pName, "CreateDevice")) + return NULL; + + /* return the dispatch table entrypoint for the fastest case */ + const VkLayerDispatchTable *disp_table = *(VkLayerDispatchTable **)device; + if (disp_table == NULL) + return NULL; + + addr = loader_lookup_device_dispatch_table(disp_table, pName); + if (addr) + return addr; + + if (disp_table->GetDeviceProcAddr == NULL) + return NULL; + return disp_table->GetDeviceProcAddr(device, pName); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEnumerateInstanceExtensionProperties(const char *pLayerName, + uint32_t *pPropertyCount, + VkExtensionProperties *pProperties) { + struct loader_extension_list *global_ext_list = NULL; + struct loader_layer_list instance_layers; + struct loader_extension_list icd_extensions; + struct loader_icd_libs icd_libs; + uint32_t copy_size; + + tls_instance = NULL; + memset(&icd_extensions, 0, sizeof(icd_extensions)); + memset(&instance_layers, 0, sizeof(instance_layers)); + loader_platform_thread_once(&once_init, loader_initialize); + + /* get layer libraries if needed */ + if (pLayerName && strlen(pLayerName) != 0) { + if (vk_string_validate(MaxLoaderStringLength, pLayerName) == + VK_STRING_ERROR_NONE) { + loader_layer_scan(NULL, &instance_layers, NULL); + for (uint32_t i = 0; i < instance_layers.count; i++) { + struct loader_layer_properties *props = + &instance_layers.list[i]; + if (strcmp(props->info.layerName, pLayerName) == 0) { + global_ext_list = &props->instance_extension_list; + } + } + } else { + assert(VK_FALSE && "vkEnumerateInstanceExtensionProperties: " + "pLayerName is too long or is badly formed"); + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } else { + /* Scan/discover all ICD libraries */ + memset(&icd_libs, 0, sizeof(struct loader_icd_libs)); + loader_icd_scan(NULL, &icd_libs); + /* get extensions from all ICD's, merge so no duplicates */ + loader_get_icd_loader_instance_extensions(NULL, &icd_libs, + &icd_extensions); + loader_scanned_icd_clear(NULL, 
&icd_libs); + global_ext_list = &icd_extensions; + } + + if (global_ext_list == NULL) { + loader_destroy_layer_list(NULL, &instance_layers); + return VK_ERROR_LAYER_NOT_PRESENT; + } + + if (pProperties == NULL) { + *pPropertyCount = global_ext_list->count; + loader_destroy_layer_list(NULL, &instance_layers); + loader_destroy_generic_list( + NULL, (struct loader_generic_list *)&icd_extensions); + return VK_SUCCESS; + } + + copy_size = *pPropertyCount < global_ext_list->count + ? *pPropertyCount + : global_ext_list->count; + for (uint32_t i = 0; i < copy_size; i++) { + memcpy(&pProperties[i], &global_ext_list->list[i], + sizeof(VkExtensionProperties)); + } + *pPropertyCount = copy_size; + loader_destroy_generic_list(NULL, + (struct loader_generic_list *)&icd_extensions); + + if (copy_size < global_ext_list->count) { + loader_destroy_layer_list(NULL, &instance_layers); + return VK_INCOMPLETE; + } + + loader_destroy_layer_list(NULL, &instance_layers); + return VK_SUCCESS; +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, + VkLayerProperties *pProperties) { + + struct loader_layer_list instance_layer_list; + tls_instance = NULL; + + loader_platform_thread_once(&once_init, loader_initialize); + + uint32_t copy_size; + + /* get layer libraries */ + memset(&instance_layer_list, 0, sizeof(instance_layer_list)); + loader_layer_scan(NULL, &instance_layer_list, NULL); + + if (pProperties == NULL) { + *pPropertyCount = instance_layer_list.count; + loader_destroy_layer_list(NULL, &instance_layer_list); + return VK_SUCCESS; + } + + copy_size = (*pPropertyCount < instance_layer_list.count) + ? 
*pPropertyCount + : instance_layer_list.count; + for (uint32_t i = 0; i < copy_size; i++) { + memcpy(&pProperties[i], &instance_layer_list.list[i].info, + sizeof(VkLayerProperties)); + } + + *pPropertyCount = copy_size; + loader_destroy_layer_list(NULL, &instance_layer_list); + + if (copy_size < instance_layer_list.count) { + return VK_INCOMPLETE; + } + + return VK_SUCCESS; +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, + const char *pLayerName, + uint32_t *pPropertyCount, + VkExtensionProperties *pProperties) { + struct loader_physical_device *phys_dev; + uint32_t copy_size; + + uint32_t count; + struct loader_device_extension_list *dev_ext_list = NULL; + struct loader_layer_list implicit_layer_list; + + // TODO fix this aliases physical devices + phys_dev = loader_get_physical_device(physicalDevice); + + /* get layer libraries if needed */ + if (pLayerName && strlen(pLayerName) != 0) { + if (vk_string_validate(MaxLoaderStringLength, pLayerName) == + VK_STRING_ERROR_NONE) { + for (uint32_t i = 0; + i < phys_dev->this_instance->device_layer_list.count; i++) { + struct loader_layer_properties *props = + &phys_dev->this_instance->device_layer_list.list[i]; + if (strcmp(props->info.layerName, pLayerName) == 0) { + dev_ext_list = &props->device_extension_list; + } + } + count = (dev_ext_list == NULL) ? 0 : dev_ext_list->count; + if (pProperties == NULL) { + *pPropertyCount = count; + return VK_SUCCESS; + } + + copy_size = *pPropertyCount < count ? 
*pPropertyCount : count; + for (uint32_t i = 0; i < copy_size; i++) { + memcpy(&pProperties[i], &dev_ext_list->list[i].props, + sizeof(VkExtensionProperties)); + } + *pPropertyCount = copy_size; + + if (copy_size < count) { + return VK_INCOMPLETE; + } + } else { + loader_log(phys_dev->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, + 0, "vkEnumerateDeviceExtensionProperties: pLayerName " + "is too long or is badly formed"); + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + return VK_SUCCESS; + } else { + /* this case is during the call down the instance chain with pLayerName + * == NULL*/ + struct loader_icd *icd = phys_dev->this_icd; + uint32_t icd_ext_count = *pPropertyCount; + VkResult res; + + /* get device extensions */ + res = icd->EnumerateDeviceExtensionProperties( + phys_dev->phys_dev, NULL, &icd_ext_count, pProperties); + if (res != VK_SUCCESS) + return res; + + loader_init_layer_list(phys_dev->this_instance, &implicit_layer_list); + + loader_add_layer_implicit( + phys_dev->this_instance, VK_LAYER_TYPE_INSTANCE_IMPLICIT, + &implicit_layer_list, + &phys_dev->this_instance->instance_layer_list); + /* we need to determine which implicit layers are active, + * and then add their extensions. This can't be cached as + * it depends on results of environment variables (which can change). + */ + if (pProperties != NULL) { + /* initialize dev_extension list within the physicalDevice object */ + res = loader_init_device_extensions( + phys_dev->this_instance, phys_dev, icd_ext_count, pProperties, + &phys_dev->device_extension_cache); + if (res != VK_SUCCESS) + return res; + + /* we need to determine which implicit layers are active, + * and then add their extensions. This can't be cached as + * it depends on results of environment variables (which can + * change). 
+ */ + struct loader_extension_list all_exts = {0}; + loader_add_to_ext_list(phys_dev->this_instance, &all_exts, + phys_dev->device_extension_cache.count, + phys_dev->device_extension_cache.list); + + loader_init_layer_list(phys_dev->this_instance, + &implicit_layer_list); + + loader_add_layer_implicit( + phys_dev->this_instance, VK_LAYER_TYPE_INSTANCE_IMPLICIT, + &implicit_layer_list, + &phys_dev->this_instance->instance_layer_list); + + for (uint32_t i = 0; i < implicit_layer_list.count; i++) { + for ( + uint32_t j = 0; + j < implicit_layer_list.list[i].device_extension_list.count; + j++) { + loader_add_to_ext_list(phys_dev->this_instance, &all_exts, + 1, + &implicit_layer_list.list[i] + .device_extension_list.list[j] + .props); + } + } + uint32_t capacity = *pPropertyCount; + VkExtensionProperties *props = pProperties; + + for (uint32_t i = 0; i < all_exts.count && i < capacity; i++) { + props[i] = all_exts.list[i]; + } + /* wasn't enough space for the extensions, we did partial copy now + * return VK_INCOMPLETE */ + if (capacity < all_exts.count) { + res = VK_INCOMPLETE; + } else { + *pPropertyCount = all_exts.count; + } + loader_destroy_generic_list( + phys_dev->this_instance, + (struct loader_generic_list *)&all_exts); + } else { + /* just return the count; need to add in the count of implicit layer + * extensions + * don't worry about duplicates being added in the count */ + *pPropertyCount = icd_ext_count; + + for (uint32_t i = 0; i < implicit_layer_list.count; i++) { + *pPropertyCount += + implicit_layer_list.list[i].device_extension_list.count; + } + res = VK_SUCCESS; + } + + loader_destroy_generic_list( + phys_dev->this_instance, + (struct loader_generic_list *)&implicit_layer_list); + return res; + } +} + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, + uint32_t *pPropertyCount, + VkLayerProperties *pProperties) { + uint32_t copy_size; + struct loader_physical_device *phys_dev; + // TODO fix this, 
aliases physical devices + phys_dev = loader_get_physical_device(physicalDevice); + uint32_t count = phys_dev->this_instance->device_layer_list.count; + + if (pProperties == NULL) { + *pPropertyCount = count; + return VK_SUCCESS; + } + + copy_size = (*pPropertyCount < count) ? *pPropertyCount : count; + for (uint32_t i = 0; i < copy_size; i++) { + memcpy(&pProperties[i], + &(phys_dev->this_instance->device_layer_list.list[i].info), + sizeof(VkLayerProperties)); + } + *pPropertyCount = copy_size; + + if (copy_size < count) { + return VK_INCOMPLETE; + } + + return VK_SUCCESS; +} + +VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8) { + VkStringErrorFlags result = VK_STRING_ERROR_NONE; + int num_char_bytes; + int i, j; + + for (i = 0; i < max_length; i++) { + if (utf8[i] == 0) { + break; + } else if ((utf8[i] >= 0x20) && (utf8[i] < 0x7f)) { + num_char_bytes = 0; + } else if ((utf8[i] & UTF8_ONE_BYTE_MASK) == UTF8_ONE_BYTE_CODE) { + num_char_bytes = 1; + } else if ((utf8[i] & UTF8_TWO_BYTE_MASK) == UTF8_TWO_BYTE_CODE) { + num_char_bytes = 2; + } else if ((utf8[i] & UTF8_THREE_BYTE_MASK) == UTF8_THREE_BYTE_CODE) { + num_char_bytes = 3; + } else { + result = VK_STRING_ERROR_BAD_DATA; + } + + // Validate the following num_char_bytes of data + for (j = 0; (j < num_char_bytes) && (i < max_length); j++) { + if (++i == max_length) { + result |= VK_STRING_ERROR_LENGTH; + break; + } + if ((utf8[i] & UTF8_DATA_BYTE_MASK) != UTF8_DATA_BYTE_CODE) { + result |= VK_STRING_ERROR_BAD_DATA; + } + } + } + return result; +} diff --git a/third_party/vulkan/loader/loader.h b/third_party/vulkan/loader/loader.h new file mode 100644 index 000000000..06c8961f6 --- /dev/null +++ b/third_party/vulkan/loader/loader.h @@ -0,0 +1,551 @@ +/* + * + * Copyright (c) 2014-2016 The Khronos Group Inc. + * Copyright (c) 2014-2016 Valve Corporation + * Copyright (c) 2014-2016 LunarG, Inc. + * Copyright (C) 2015 Google Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Jon Ashburn + * Author: Courtney Goeltzenleuchter + * Author: Chia-I Wu + * Author: Chia-I Wu + * Author: Mark Lobodzinski + * + */ + +#ifndef LOADER_H +#define LOADER_H + +#include +#include + + +#include +#include +#include + +#if defined(__GNUC__) && __GNUC__ >= 4 +#define LOADER_EXPORT __attribute__((visibility("default"))) +#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590) +#define LOADER_EXPORT __attribute__((visibility("default"))) +#else +#define LOADER_EXPORT +#endif + +#define MAX_STRING_SIZE 1024 +#define VK_MAJOR(version) (version >> 22) +#define VK_MINOR(version) ((version >> 12) & 0x3ff) +#define VK_PATCH(version) (version & 0xfff) + +enum layer_type { + VK_LAYER_TYPE_DEVICE_EXPLICIT = 0x1, + VK_LAYER_TYPE_INSTANCE_EXPLICIT = 0x2, + VK_LAYER_TYPE_GLOBAL_EXPLICIT = 0x3, // instance and device layer, bitwise + VK_LAYER_TYPE_DEVICE_IMPLICIT = 0x4, + VK_LAYER_TYPE_INSTANCE_IMPLICIT = 0x8, + VK_LAYER_TYPE_GLOBAL_IMPLICIT = 0xc, // instance and device layer, bitwise + VK_LAYER_TYPE_META_EXPLICT = 0x10, +}; + +typedef enum VkStringErrorFlagBits { + VK_STRING_ERROR_NONE = 0x00000000, + VK_STRING_ERROR_LENGTH = 0x00000001, + VK_STRING_ERROR_BAD_DATA = 0x00000002, +} VkStringErrorFlagBits; +typedef VkFlags VkStringErrorFlags; + +static const int MaxLoaderStringLength = 256; +static const char UTF8_ONE_BYTE_CODE = 0xC0; +static const char UTF8_ONE_BYTE_MASK = 0xE0; +static const char UTF8_TWO_BYTE_CODE = 0xE0; +static const char UTF8_TWO_BYTE_MASK = 0xF0; +static const char UTF8_THREE_BYTE_CODE = 0xF0; +static const char UTF8_THREE_BYTE_MASK = 0xF8; +static const char UTF8_DATA_BYTE_CODE = 0x80; +static const char UTF8_DATA_BYTE_MASK = 0xC0; + +static const char std_validation_names[9][VK_MAX_EXTENSION_NAME_SIZE] = { + "VK_LAYER_LUNARG_threading", "VK_LAYER_LUNARG_param_checker", + "VK_LAYER_LUNARG_device_limits", "VK_LAYER_LUNARG_object_tracker", + "VK_LAYER_LUNARG_image", "VK_LAYER_LUNARG_mem_tracker", + "VK_LAYER_LUNARG_draw_state", 
"VK_LAYER_LUNARG_swapchain", + "VK_LAYER_GOOGLE_unique_objects"}; + +// form of all dynamic lists/arrays +// only the list element should be changed +struct loader_generic_list { + size_t capacity; + uint32_t count; + void *list; +}; + +struct loader_extension_list { + size_t capacity; + uint32_t count; + VkExtensionProperties *list; +}; + +struct loader_dev_ext_props { + VkExtensionProperties props; + uint32_t entrypoint_count; + char **entrypoints; +}; + +struct loader_device_extension_list { + size_t capacity; + uint32_t count; + struct loader_dev_ext_props *list; +}; + +struct loader_name_value { + char name[MAX_STRING_SIZE]; + char value[MAX_STRING_SIZE]; +}; + +struct loader_lib_info { + char lib_name[MAX_STRING_SIZE]; + uint32_t ref_count; + loader_platform_dl_handle lib_handle; +}; + +struct loader_layer_functions { + char str_gipa[MAX_STRING_SIZE]; + char str_gdpa[MAX_STRING_SIZE]; + PFN_vkGetInstanceProcAddr get_instance_proc_addr; + PFN_vkGetDeviceProcAddr get_device_proc_addr; +}; + +struct loader_layer_properties { + VkLayerProperties info; + enum layer_type type; + char lib_name[MAX_STRING_SIZE]; + struct loader_layer_functions functions; + struct loader_extension_list instance_extension_list; + struct loader_device_extension_list device_extension_list; + struct loader_name_value disable_env_var; + struct loader_name_value enable_env_var; +}; + +struct loader_layer_list { + size_t capacity; + uint32_t count; + struct loader_layer_properties *list; +}; + +struct loader_layer_library_list { + size_t capacity; + uint32_t count; + struct loader_lib_info *list; +}; + +struct loader_dispatch_hash_list { + size_t capacity; + uint32_t count; + uint32_t *index; // index into the dev_ext dispatch table +}; + +#define MAX_NUM_DEV_EXTS 250 +// loader_dispatch_hash_entry and loader_dev_ext_dispatch_table.DevExt have one +// to one +// correspondence; one loader_dispatch_hash_entry for one DevExt dispatch entry. 
+// Also have a one to one correspondence with functions in dev_ext_trampoline.c +struct loader_dispatch_hash_entry { + char *func_name; + struct loader_dispatch_hash_list list; // to handle hashing collisions +}; + +typedef void(VKAPI_PTR *PFN_vkDevExt)(VkDevice device); +struct loader_dev_ext_dispatch_table { + PFN_vkDevExt DevExt[MAX_NUM_DEV_EXTS]; +}; + +struct loader_dev_dispatch_table { + VkLayerDispatchTable core_dispatch; + struct loader_dev_ext_dispatch_table ext_dispatch; +}; + +/* per CreateDevice structure */ +struct loader_device { + struct loader_dev_dispatch_table loader_dispatch; + VkDevice device; // device object from the icd + + uint32_t app_extension_count; + VkExtensionProperties *app_extension_props; + + struct loader_layer_list activated_layer_list; + + struct loader_device *next; +}; + +/* per ICD structure */ +struct loader_icd { + // pointers to find other structs + const struct loader_scanned_icds *this_icd_lib; + const struct loader_instance *this_instance; + + struct loader_device *logical_device_list; + VkInstance instance; // instance object from the icd + PFN_vkGetDeviceProcAddr GetDeviceProcAddr; + PFN_vkDestroyInstance DestroyInstance; + PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices; + PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures; + PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceImageFormatProperties + GetPhysicalDeviceImageFormatProperties; + PFN_vkCreateDevice CreateDevice; + PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceQueueFamilyProperties + GetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties; + PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties + GetPhysicalDeviceSparseImageFormatProperties; + PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT; + 
PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT; + PFN_vkDebugReportMessageEXT DebugReportMessageEXT; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR + GetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR + GetPhysicalDeviceSurfacePresentModesKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR + GetPhysicalDeviceWin32PresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_MIR_KHR + PFN_vkGetPhysicalDeviceMirPresentationSupportKHR + GetPhysicalDeviceMirPresentvationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR + GetPhysicalDeviceWaylandPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR + GetPhysicalDeviceXcbPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR + GetPhysicalDeviceXlibPresentationSupportKHR; +#endif + + struct loader_icd *next; +}; + +/* per ICD library structure */ +struct loader_icd_libs { + size_t capacity; + uint32_t count; + struct loader_scanned_icds *list; +}; + +/* per instance structure */ +struct loader_instance { + VkLayerInstanceDispatchTable *disp; // must be first entry in structure + + uint32_t total_gpu_count; + struct loader_physical_device *phys_devs; + uint32_t total_icd_count; + struct loader_icd *icds; + struct loader_instance *next; + struct loader_extension_list ext_list; // icds and loaders extensions + struct loader_icd_libs icd_libs; + struct loader_layer_list instance_layer_list; + struct loader_layer_list device_layer_list; + struct loader_dispatch_hash_entry disp_hash[MAX_NUM_DEV_EXTS]; + + struct loader_msg_callback_map_entry *icd_msg_callback_map; + + struct loader_layer_list 
activated_layer_list; + + VkInstance instance; + + bool debug_report_enabled; + VkLayerDbgFunctionNode *DbgFunctionHead; + + VkAllocationCallbacks alloc_callbacks; + + bool wsi_surface_enabled; +#ifdef VK_USE_PLATFORM_WIN32_KHR + bool wsi_win32_surface_enabled; +#endif +#ifdef VK_USE_PLATFORM_MIR_KHR + bool wsi_mir_surface_enabled; +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + bool wsi_wayland_surface_enabled; +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + bool wsi_xcb_surface_enabled; +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + bool wsi_xlib_surface_enabled; +#endif +#ifdef VK_USE_PLATFORM_ANDROID_KHR + bool wsi_android_surface_enabled; +#endif +}; + +/* per enumerated PhysicalDevice structure */ +struct loader_physical_device { + VkLayerInstanceDispatchTable *disp; // must be first entry in structure + struct loader_instance *this_instance; + struct loader_icd *this_icd; + VkPhysicalDevice phys_dev; // object from ICD + /* + * Fill in the cache of available device extensions from + * this physical device. 
This cache can be used during CreateDevice + */ + struct loader_extension_list device_extension_cache; +}; + +struct loader_struct { + struct loader_instance *instances; + + unsigned int loaded_layer_lib_count; + size_t loaded_layer_lib_capacity; + struct loader_lib_info *loaded_layer_lib_list; + // TODO add ref counting of ICD libraries + // TODO use this struct loader_layer_library_list scanned_layer_libraries; + // TODO add list of icd libraries for ref counting them for closure +}; + +struct loader_scanned_icds { + char *lib_name; + loader_platform_dl_handle handle; + uint32_t api_version; + PFN_vkGetInstanceProcAddr GetInstanceProcAddr; + PFN_vkCreateInstance CreateInstance; + PFN_vkEnumerateInstanceExtensionProperties + EnumerateInstanceExtensionProperties; +}; + +static inline struct loader_instance *loader_instance(VkInstance instance) { + return (struct loader_instance *)instance; +} + +static inline void loader_set_dispatch(void *obj, const void *data) { + *((const void **)obj) = data; +} + +static inline VkLayerDispatchTable *loader_get_dispatch(const void *obj) { + return *((VkLayerDispatchTable **)obj); +} + +static inline struct loader_dev_dispatch_table * +loader_get_dev_dispatch(const void *obj) { + return *((struct loader_dev_dispatch_table **)obj); +} + +static inline VkLayerInstanceDispatchTable * +loader_get_instance_dispatch(const void *obj) { + return *((VkLayerInstanceDispatchTable **)obj); +} + +static inline void loader_init_dispatch(void *obj, const void *data) { +#ifdef DEBUG + assert(valid_loader_magic_value(obj) && + "Incompatible ICD, first dword must be initialized to " + "ICD_LOADER_MAGIC. 
See loader/README.md for details."); +#endif + + loader_set_dispatch(obj, data); +} + +/* global variables used across files */ +extern struct loader_struct loader; +extern THREAD_LOCAL_DECL struct loader_instance *tls_instance; +extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init); +extern loader_platform_thread_mutex loader_lock; +extern loader_platform_thread_mutex loader_json_lock; +extern const VkLayerInstanceDispatchTable instance_disp; +extern const char *std_validation_str; + +struct loader_msg_callback_map_entry { + VkDebugReportCallbackEXT icd_obj; + VkDebugReportCallbackEXT loader_obj; +}; + +void loader_log(const struct loader_instance *inst, VkFlags msg_type, + int32_t msg_code, const char *format, ...); + +bool compare_vk_extension_properties(const VkExtensionProperties *op1, + const VkExtensionProperties *op2); + +VkResult loader_validate_layers(const struct loader_instance *inst, + const uint32_t layer_count, + const char *const *ppEnabledLayerNames, + const struct loader_layer_list *list); + +VkResult loader_validate_instance_extensions( + const struct loader_instance *inst, + const struct loader_extension_list *icd_exts, + const struct loader_layer_list *instance_layer, + const VkInstanceCreateInfo *pCreateInfo); + +/* instance layer chain termination entrypoint definitions */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateInstance(const VkInstanceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkInstance *pInstance); + +VKAPI_ATTR void VKAPI_CALL +loader_DestroyInstance(VkInstance instance, + const VkAllocationCallbacks *pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumeratePhysicalDevices(VkInstance instance, + uint32_t *pPhysicalDeviceCount, + VkPhysicalDevice *pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures *pFeatures); + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceFormatProperties(VkPhysicalDevice 
physicalDevice, + VkFormat format, + VkFormatProperties *pFormatInfo); + +VKAPI_ATTR VkResult VKAPI_CALL loader_GetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, + VkImageFormatProperties *pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + VkSampleCountFlagBits samples, VkImageUsageFlags usage, + VkImageTiling tiling, uint32_t *pNumProperties, + VkSparseImageFormatProperties *pProperties); + +VKAPI_ATTR void VKAPI_CALL +loader_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties *pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, + const char *pLayerName, + uint32_t *pCount, + VkExtensionProperties *pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, + uint32_t *pCount, + VkLayerProperties *pProperties); + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, uint32_t *pCount, + VkQueueFamilyProperties *pProperties); + +VKAPI_ATTR void VKAPI_CALL loader_GetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties *pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_create_device_terminator(VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDevice *pDevice); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice); + +/* helper function definitions */ +void loader_initialize(void); +bool has_vk_extension_property_array(const 
VkExtensionProperties *vk_ext_prop, + const uint32_t count, + const VkExtensionProperties *ext_array); +bool has_vk_extension_property(const VkExtensionProperties *vk_ext_prop, + const struct loader_extension_list *ext_list); + +VkResult loader_add_to_ext_list(const struct loader_instance *inst, + struct loader_extension_list *ext_list, + uint32_t prop_list_count, + const VkExtensionProperties *props); +void loader_destroy_generic_list(const struct loader_instance *inst, + struct loader_generic_list *list); +void loader_delete_layer_properties(const struct loader_instance *inst, + struct loader_layer_list *layer_list); +void loader_expand_layer_names( + const struct loader_instance *inst, const char *key_name, + uint32_t expand_count, + const char expand_names[][VK_MAX_EXTENSION_NAME_SIZE], + uint32_t *layer_count, char ***ppp_layer_names); +void loader_unexpand_dev_layer_names(const struct loader_instance *inst, + uint32_t layer_count, char **layer_names, + char **layer_ptr, + const VkDeviceCreateInfo *pCreateInfo); +void loader_unexpand_inst_layer_names(const struct loader_instance *inst, + uint32_t layer_count, char **layer_names, + char **layer_ptr, + const VkInstanceCreateInfo *pCreateInfo); +void loader_add_to_layer_list(const struct loader_instance *inst, + struct loader_layer_list *list, + uint32_t prop_list_count, + const struct loader_layer_properties *props); +void loader_scanned_icd_clear(const struct loader_instance *inst, + struct loader_icd_libs *icd_libs); +void loader_icd_scan(const struct loader_instance *inst, + struct loader_icd_libs *icds); +void loader_layer_scan(const struct loader_instance *inst, + struct loader_layer_list *instance_layers, + struct loader_layer_list *device_layers); +void loader_get_icd_loader_instance_extensions( + const struct loader_instance *inst, struct loader_icd_libs *icd_libs, + struct loader_extension_list *inst_exts); +struct loader_icd *loader_get_icd_and_device(const VkDevice device, + struct loader_device 
**found_dev); +void *loader_dev_ext_gpa(struct loader_instance *inst, const char *funcName); +void *loader_get_dev_ext_trampoline(uint32_t index); +struct loader_instance *loader_get_instance(const VkInstance instance); +void loader_remove_logical_device(const struct loader_instance *inst, + struct loader_icd *icd, + struct loader_device *found_dev); +VkResult +loader_enable_instance_layers(struct loader_instance *inst, + const VkInstanceCreateInfo *pCreateInfo, + const struct loader_layer_list *instance_layers); +void loader_deactivate_instance_layers(struct loader_instance *instance); + +VkResult loader_create_instance_chain(const VkInstanceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + struct loader_instance *inst, + VkInstance *created_instance); + +void loader_activate_instance_layer_extensions(struct loader_instance *inst, + VkInstance created_inst); + +void *loader_heap_alloc(const struct loader_instance *instance, size_t size, + VkSystemAllocationScope allocationScope); + +void loader_heap_free(const struct loader_instance *instance, void *pMemory); + +void *loader_tls_heap_alloc(size_t size); + +void loader_tls_heap_free(void *pMemory); + +VkStringErrorFlags vk_string_validate(const int max_length, + const char *char_array); + +#endif /* LOADER_H */ diff --git a/third_party/vulkan/loader/murmurhash.c b/third_party/vulkan/loader/murmurhash.c new file mode 100644 index 000000000..5e5d0de64 --- /dev/null +++ b/third_party/vulkan/loader/murmurhash.c @@ -0,0 +1,97 @@ + +/** + * `murmurhash.h' - murmurhash + * + * copyright (c) 2014 joseph werle + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ */ + +#include <stdint.h> +#include <stdlib.h> +#include <string.h> +#include "murmurhash.h" + +uint32_t murmurhash(const char *key, size_t len, uint32_t seed) { + uint32_t c1 = 0xcc9e2d51; + uint32_t c2 = 0x1b873593; + uint32_t r1 = 15; + uint32_t r2 = 13; + uint32_t m = 5; + uint32_t n = 0xe6546b64; + uint32_t h = 0; + uint32_t k = 0; + uint8_t *d = (uint8_t *)key; // 32 bit extract from `key' + const uint32_t *chunks = NULL; + const uint8_t *tail = NULL; // tail - last 8 bytes + int i = 0; + int l = (int)len / 4; // chunk length + + h = seed; + + chunks = (const uint32_t *)(d + l * 4); // body + tail = (const uint8_t *)(d + l * 4); // last 8 byte chunk of `key' + + // for each 4 byte chunk of `key' + for (i = -l; i != 0; ++i) { + // next 4 byte chunk of `key' + k = chunks[i]; + + // encode next 4 byte chunk of `key' + k *= c1; + k = (k << r1) | (k >> (32 - r1)); + k *= c2; + + // append to hash + h ^= k; + h = (h << r2) | (h >> (32 - r2)); + h = h * m + n; + } + + k = 0; + + // remainder + switch (len & 3) { // `len % 4' + case 3: + k ^= (tail[2] << 16); + case 2: + k ^= (tail[1] << 8); + + case 1: + k ^= tail[0]; + k *= c1; + k = (k << r1) | (k >> (32 - r1)); + k *= c2; + h ^= k; + } + + h ^= len; + + h ^= (h >> 16); + h *= 0x85ebca6b; + h ^= (h >> 13); + h *= 0xc2b2ae35; + h ^= (h >> 16); + + return h; +} diff --git a/third_party/vulkan/loader/murmurhash.h b/third_party/vulkan/loader/murmurhash.h new file mode 100644 index 000000000..775532e8b --- /dev/null +++ b/third_party/vulkan/loader/murmurhash.h @@ -0,0 +1,52 @@ + +/** + * `murmurhash.h' - murmurhash + * + * copyright (c) 2014 joseph werle + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ */ + +#ifndef MURMURHASH_H +#define MURMURHASH_H 1 + +#include <stdint.h> + +#define MURMURHASH_VERSION "0.0.3" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Returns a murmur hash of `key' based on `seed' + * using the MurmurHash3 algorithm + */ + +uint32_t murmurhash(const char *key, size_t len, uint32_t seed); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/third_party/vulkan/loader/premake5.lua b/third_party/vulkan/loader/premake5.lua new file mode 100644 index 000000000..35675f232 --- /dev/null +++ b/third_party/vulkan/loader/premake5.lua @@ -0,0 +1,24 @@ +group("third_party") +project("vulkan-loader") + uuid("07d77359-1618-43e6-8a4a-0ee9ddc5fa6a") + kind("StaticLib") + language("C++") + + defines({ + "_LIB", + }) + removedefines({ + "_UNICODE", + "UNICODE", + }) + includedirs({ + ".", + }) + recursive_platform_files() + + filter("platforms:Windows") + warnings("Off") -- Too many warnings. + characterset("MBCS") + defines({ + "VK_USE_PLATFORM_WIN32_KHR", + }) diff --git a/third_party/vulkan/loader/table_ops.h b/third_party/vulkan/loader/table_ops.h new file mode 100644 index 000000000..4bf8b410a --- /dev/null +++ b/third_party/vulkan/loader/table_ops.h @@ -0,0 +1,710 @@ +/* + * + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * Copyright (C) 2016 Google Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. 
+ * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + * + * Author: Courtney Goeltzenleuchter + * Author: Jon Ashburn + * Author: Ian Elliott + * Author: Tony Barbour + */ + +#include <vulkan/vulkan.h> +#include <vulkan/vk_layer.h> +#include <string.h> +#include "loader.h" +#include "vk_loader_platform.h" + +static VkResult vkDevExtError(VkDevice dev) { + struct loader_device *found_dev; + struct loader_icd *icd = loader_get_icd_and_device(dev, &found_dev); + + if (icd) + loader_log(icd->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0, + "Bad destination in loader trampoline dispatch," + "Are layers and extensions that you are calling enabled?"); + return VK_ERROR_EXTENSION_NOT_PRESENT; +} + +static inline void +loader_init_device_dispatch_table(struct loader_dev_dispatch_table *dev_table, + PFN_vkGetDeviceProcAddr gpa, VkDevice dev) { + VkLayerDispatchTable *table = &dev_table->core_dispatch; + for (uint32_t i = 0; i < MAX_NUM_DEV_EXTS; i++) + dev_table->ext_dispatch.DevExt[i] = (PFN_vkDevExt)vkDevExtError; + + table->GetDeviceProcAddr = + (PFN_vkGetDeviceProcAddr)gpa(dev, "vkGetDeviceProcAddr"); + table->DestroyDevice = (PFN_vkDestroyDevice)gpa(dev, "vkDestroyDevice"); + table->GetDeviceQueue = (PFN_vkGetDeviceQueue)gpa(dev, "vkGetDeviceQueue"); + table->QueueSubmit = (PFN_vkQueueSubmit)gpa(dev, "vkQueueSubmit"); + table->QueueWaitIdle = (PFN_vkQueueWaitIdle)gpa(dev, "vkQueueWaitIdle"); + table->DeviceWaitIdle = (PFN_vkDeviceWaitIdle)gpa(dev, "vkDeviceWaitIdle"); + table->AllocateMemory = (PFN_vkAllocateMemory)gpa(dev, "vkAllocateMemory"); + table->FreeMemory = 
(PFN_vkFreeMemory)gpa(dev, "vkFreeMemory"); + table->MapMemory = (PFN_vkMapMemory)gpa(dev, "vkMapMemory"); + table->UnmapMemory = (PFN_vkUnmapMemory)gpa(dev, "vkUnmapMemory"); + table->FlushMappedMemoryRanges = + (PFN_vkFlushMappedMemoryRanges)gpa(dev, "vkFlushMappedMemoryRanges"); + table->InvalidateMappedMemoryRanges = + (PFN_vkInvalidateMappedMemoryRanges)gpa( + dev, "vkInvalidateMappedMemoryRanges"); + table->GetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)gpa( + dev, "vkGetDeviceMemoryCommitment"); + table->GetImageSparseMemoryRequirements = + (PFN_vkGetImageSparseMemoryRequirements)gpa( + dev, "vkGetImageSparseMemoryRequirements"); + table->GetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)gpa( + dev, "vkGetBufferMemoryRequirements"); + table->GetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)gpa( + dev, "vkGetImageMemoryRequirements"); + table->BindBufferMemory = + (PFN_vkBindBufferMemory)gpa(dev, "vkBindBufferMemory"); + table->BindImageMemory = + (PFN_vkBindImageMemory)gpa(dev, "vkBindImageMemory"); + table->QueueBindSparse = + (PFN_vkQueueBindSparse)gpa(dev, "vkQueueBindSparse"); + table->CreateFence = (PFN_vkCreateFence)gpa(dev, "vkCreateFence"); + table->DestroyFence = (PFN_vkDestroyFence)gpa(dev, "vkDestroyFence"); + table->ResetFences = (PFN_vkResetFences)gpa(dev, "vkResetFences"); + table->GetFenceStatus = (PFN_vkGetFenceStatus)gpa(dev, "vkGetFenceStatus"); + table->WaitForFences = (PFN_vkWaitForFences)gpa(dev, "vkWaitForFences"); + table->CreateSemaphore = + (PFN_vkCreateSemaphore)gpa(dev, "vkCreateSemaphore"); + table->DestroySemaphore = + (PFN_vkDestroySemaphore)gpa(dev, "vkDestroySemaphore"); + table->CreateEvent = (PFN_vkCreateEvent)gpa(dev, "vkCreateEvent"); + table->DestroyEvent = (PFN_vkDestroyEvent)gpa(dev, "vkDestroyEvent"); + table->GetEventStatus = (PFN_vkGetEventStatus)gpa(dev, "vkGetEventStatus"); + table->SetEvent = (PFN_vkSetEvent)gpa(dev, "vkSetEvent"); + table->ResetEvent = 
(PFN_vkResetEvent)gpa(dev, "vkResetEvent"); + table->CreateQueryPool = + (PFN_vkCreateQueryPool)gpa(dev, "vkCreateQueryPool"); + table->DestroyQueryPool = + (PFN_vkDestroyQueryPool)gpa(dev, "vkDestroyQueryPool"); + table->GetQueryPoolResults = + (PFN_vkGetQueryPoolResults)gpa(dev, "vkGetQueryPoolResults"); + table->CreateBuffer = (PFN_vkCreateBuffer)gpa(dev, "vkCreateBuffer"); + table->DestroyBuffer = (PFN_vkDestroyBuffer)gpa(dev, "vkDestroyBuffer"); + table->CreateBufferView = + (PFN_vkCreateBufferView)gpa(dev, "vkCreateBufferView"); + table->DestroyBufferView = + (PFN_vkDestroyBufferView)gpa(dev, "vkDestroyBufferView"); + table->CreateImage = (PFN_vkCreateImage)gpa(dev, "vkCreateImage"); + table->DestroyImage = (PFN_vkDestroyImage)gpa(dev, "vkDestroyImage"); + table->GetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)gpa( + dev, "vkGetImageSubresourceLayout"); + table->CreateImageView = + (PFN_vkCreateImageView)gpa(dev, "vkCreateImageView"); + table->DestroyImageView = + (PFN_vkDestroyImageView)gpa(dev, "vkDestroyImageView"); + table->CreateShaderModule = + (PFN_vkCreateShaderModule)gpa(dev, "vkCreateShaderModule"); + table->DestroyShaderModule = + (PFN_vkDestroyShaderModule)gpa(dev, "vkDestroyShaderModule"); + table->CreatePipelineCache = + (PFN_vkCreatePipelineCache)gpa(dev, "vkCreatePipelineCache"); + table->DestroyPipelineCache = + (PFN_vkDestroyPipelineCache)gpa(dev, "vkDestroyPipelineCache"); + table->GetPipelineCacheData = + (PFN_vkGetPipelineCacheData)gpa(dev, "vkGetPipelineCacheData"); + table->MergePipelineCaches = + (PFN_vkMergePipelineCaches)gpa(dev, "vkMergePipelineCaches"); + table->CreateGraphicsPipelines = + (PFN_vkCreateGraphicsPipelines)gpa(dev, "vkCreateGraphicsPipelines"); + table->CreateComputePipelines = + (PFN_vkCreateComputePipelines)gpa(dev, "vkCreateComputePipelines"); + table->DestroyPipeline = + (PFN_vkDestroyPipeline)gpa(dev, "vkDestroyPipeline"); + table->CreatePipelineLayout = + (PFN_vkCreatePipelineLayout)gpa(dev, 
"vkCreatePipelineLayout"); + table->DestroyPipelineLayout = + (PFN_vkDestroyPipelineLayout)gpa(dev, "vkDestroyPipelineLayout"); + table->CreateSampler = (PFN_vkCreateSampler)gpa(dev, "vkCreateSampler"); + table->DestroySampler = (PFN_vkDestroySampler)gpa(dev, "vkDestroySampler"); + table->CreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)gpa( + dev, "vkCreateDescriptorSetLayout"); + table->DestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)gpa( + dev, "vkDestroyDescriptorSetLayout"); + table->CreateDescriptorPool = + (PFN_vkCreateDescriptorPool)gpa(dev, "vkCreateDescriptorPool"); + table->DestroyDescriptorPool = + (PFN_vkDestroyDescriptorPool)gpa(dev, "vkDestroyDescriptorPool"); + table->ResetDescriptorPool = + (PFN_vkResetDescriptorPool)gpa(dev, "vkResetDescriptorPool"); + table->AllocateDescriptorSets = + (PFN_vkAllocateDescriptorSets)gpa(dev, "vkAllocateDescriptorSets"); + table->FreeDescriptorSets = + (PFN_vkFreeDescriptorSets)gpa(dev, "vkFreeDescriptorSets"); + table->UpdateDescriptorSets = + (PFN_vkUpdateDescriptorSets)gpa(dev, "vkUpdateDescriptorSets"); + table->CreateFramebuffer = + (PFN_vkCreateFramebuffer)gpa(dev, "vkCreateFramebuffer"); + table->DestroyFramebuffer = + (PFN_vkDestroyFramebuffer)gpa(dev, "vkDestroyFramebuffer"); + table->CreateRenderPass = + (PFN_vkCreateRenderPass)gpa(dev, "vkCreateRenderPass"); + table->DestroyRenderPass = + (PFN_vkDestroyRenderPass)gpa(dev, "vkDestroyRenderPass"); + table->GetRenderAreaGranularity = + (PFN_vkGetRenderAreaGranularity)gpa(dev, "vkGetRenderAreaGranularity"); + table->CreateCommandPool = + (PFN_vkCreateCommandPool)gpa(dev, "vkCreateCommandPool"); + table->DestroyCommandPool = + (PFN_vkDestroyCommandPool)gpa(dev, "vkDestroyCommandPool"); + table->ResetCommandPool = + (PFN_vkResetCommandPool)gpa(dev, "vkResetCommandPool"); + table->AllocateCommandBuffers = + (PFN_vkAllocateCommandBuffers)gpa(dev, "vkAllocateCommandBuffers"); + table->FreeCommandBuffers = + 
(PFN_vkFreeCommandBuffers)gpa(dev, "vkFreeCommandBuffers"); + table->BeginCommandBuffer = + (PFN_vkBeginCommandBuffer)gpa(dev, "vkBeginCommandBuffer"); + table->EndCommandBuffer = + (PFN_vkEndCommandBuffer)gpa(dev, "vkEndCommandBuffer"); + table->ResetCommandBuffer = + (PFN_vkResetCommandBuffer)gpa(dev, "vkResetCommandBuffer"); + table->CmdBindPipeline = + (PFN_vkCmdBindPipeline)gpa(dev, "vkCmdBindPipeline"); + table->CmdSetViewport = (PFN_vkCmdSetViewport)gpa(dev, "vkCmdSetViewport"); + table->CmdSetScissor = (PFN_vkCmdSetScissor)gpa(dev, "vkCmdSetScissor"); + table->CmdSetLineWidth = + (PFN_vkCmdSetLineWidth)gpa(dev, "vkCmdSetLineWidth"); + table->CmdSetDepthBias = + (PFN_vkCmdSetDepthBias)gpa(dev, "vkCmdSetDepthBias"); + table->CmdSetBlendConstants = + (PFN_vkCmdSetBlendConstants)gpa(dev, "vkCmdSetBlendConstants"); + table->CmdSetDepthBounds = + (PFN_vkCmdSetDepthBounds)gpa(dev, "vkCmdSetDepthBounds"); + table->CmdSetStencilCompareMask = + (PFN_vkCmdSetStencilCompareMask)gpa(dev, "vkCmdSetStencilCompareMask"); + table->CmdSetStencilWriteMask = + (PFN_vkCmdSetStencilWriteMask)gpa(dev, "vkCmdSetStencilWriteMask"); + table->CmdSetStencilReference = + (PFN_vkCmdSetStencilReference)gpa(dev, "vkCmdSetStencilReference"); + table->CmdBindDescriptorSets = + (PFN_vkCmdBindDescriptorSets)gpa(dev, "vkCmdBindDescriptorSets"); + table->CmdBindVertexBuffers = + (PFN_vkCmdBindVertexBuffers)gpa(dev, "vkCmdBindVertexBuffers"); + table->CmdBindIndexBuffer = + (PFN_vkCmdBindIndexBuffer)gpa(dev, "vkCmdBindIndexBuffer"); + table->CmdDraw = (PFN_vkCmdDraw)gpa(dev, "vkCmdDraw"); + table->CmdDrawIndexed = (PFN_vkCmdDrawIndexed)gpa(dev, "vkCmdDrawIndexed"); + table->CmdDrawIndirect = + (PFN_vkCmdDrawIndirect)gpa(dev, "vkCmdDrawIndirect"); + table->CmdDrawIndexedIndirect = + (PFN_vkCmdDrawIndexedIndirect)gpa(dev, "vkCmdDrawIndexedIndirect"); + table->CmdDispatch = (PFN_vkCmdDispatch)gpa(dev, "vkCmdDispatch"); + table->CmdDispatchIndirect = + (PFN_vkCmdDispatchIndirect)gpa(dev, 
"vkCmdDispatchIndirect"); + table->CmdCopyBuffer = (PFN_vkCmdCopyBuffer)gpa(dev, "vkCmdCopyBuffer"); + table->CmdCopyImage = (PFN_vkCmdCopyImage)gpa(dev, "vkCmdCopyImage"); + table->CmdBlitImage = (PFN_vkCmdBlitImage)gpa(dev, "vkCmdBlitImage"); + table->CmdCopyBufferToImage = + (PFN_vkCmdCopyBufferToImage)gpa(dev, "vkCmdCopyBufferToImage"); + table->CmdCopyImageToBuffer = + (PFN_vkCmdCopyImageToBuffer)gpa(dev, "vkCmdCopyImageToBuffer"); + table->CmdUpdateBuffer = + (PFN_vkCmdUpdateBuffer)gpa(dev, "vkCmdUpdateBuffer"); + table->CmdFillBuffer = (PFN_vkCmdFillBuffer)gpa(dev, "vkCmdFillBuffer"); + table->CmdClearColorImage = + (PFN_vkCmdClearColorImage)gpa(dev, "vkCmdClearColorImage"); + table->CmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)gpa( + dev, "vkCmdClearDepthStencilImage"); + table->CmdClearAttachments = + (PFN_vkCmdClearAttachments)gpa(dev, "vkCmdClearAttachments"); + table->CmdResolveImage = + (PFN_vkCmdResolveImage)gpa(dev, "vkCmdResolveImage"); + table->CmdSetEvent = (PFN_vkCmdSetEvent)gpa(dev, "vkCmdSetEvent"); + table->CmdResetEvent = (PFN_vkCmdResetEvent)gpa(dev, "vkCmdResetEvent"); + table->CmdWaitEvents = (PFN_vkCmdWaitEvents)gpa(dev, "vkCmdWaitEvents"); + table->CmdPipelineBarrier = + (PFN_vkCmdPipelineBarrier)gpa(dev, "vkCmdPipelineBarrier"); + table->CmdBeginQuery = (PFN_vkCmdBeginQuery)gpa(dev, "vkCmdBeginQuery"); + table->CmdEndQuery = (PFN_vkCmdEndQuery)gpa(dev, "vkCmdEndQuery"); + table->CmdResetQueryPool = + (PFN_vkCmdResetQueryPool)gpa(dev, "vkCmdResetQueryPool"); + table->CmdWriteTimestamp = + (PFN_vkCmdWriteTimestamp)gpa(dev, "vkCmdWriteTimestamp"); + table->CmdCopyQueryPoolResults = + (PFN_vkCmdCopyQueryPoolResults)gpa(dev, "vkCmdCopyQueryPoolResults"); + table->CmdPushConstants = + (PFN_vkCmdPushConstants)gpa(dev, "vkCmdPushConstants"); + table->CmdBeginRenderPass = + (PFN_vkCmdBeginRenderPass)gpa(dev, "vkCmdBeginRenderPass"); + table->CmdNextSubpass = (PFN_vkCmdNextSubpass)gpa(dev, "vkCmdNextSubpass"); + 
table->CmdEndRenderPass = + (PFN_vkCmdEndRenderPass)gpa(dev, "vkCmdEndRenderPass"); + table->CmdExecuteCommands = + (PFN_vkCmdExecuteCommands)gpa(dev, "vkCmdExecuteCommands"); +} + +static inline void loader_init_device_extension_dispatch_table( + struct loader_dev_dispatch_table *dev_table, PFN_vkGetDeviceProcAddr gpa, + VkDevice dev) { + VkLayerDispatchTable *table = &dev_table->core_dispatch; + table->AcquireNextImageKHR = + (PFN_vkAcquireNextImageKHR)gpa(dev, "vkAcquireNextImageKHR"); + table->CreateSwapchainKHR = + (PFN_vkCreateSwapchainKHR)gpa(dev, "vkCreateSwapchainKHR"); + table->DestroySwapchainKHR = + (PFN_vkDestroySwapchainKHR)gpa(dev, "vkDestroySwapchainKHR"); + table->GetSwapchainImagesKHR = + (PFN_vkGetSwapchainImagesKHR)gpa(dev, "vkGetSwapchainImagesKHR"); + table->QueuePresentKHR = + (PFN_vkQueuePresentKHR)gpa(dev, "vkQueuePresentKHR"); +} + +static inline void * +loader_lookup_device_dispatch_table(const VkLayerDispatchTable *table, + const char *name) { + if (!name || name[0] != 'v' || name[1] != 'k') + return NULL; + + name += 2; + if (!strcmp(name, "GetDeviceProcAddr")) + return (void *)table->GetDeviceProcAddr; + if (!strcmp(name, "DestroyDevice")) + return (void *)table->DestroyDevice; + if (!strcmp(name, "GetDeviceQueue")) + return (void *)table->GetDeviceQueue; + if (!strcmp(name, "QueueSubmit")) + return (void *)table->QueueSubmit; + if (!strcmp(name, "QueueWaitIdle")) + return (void *)table->QueueWaitIdle; + if (!strcmp(name, "DeviceWaitIdle")) + return (void *)table->DeviceWaitIdle; + if (!strcmp(name, "AllocateMemory")) + return (void *)table->AllocateMemory; + if (!strcmp(name, "FreeMemory")) + return (void *)table->FreeMemory; + if (!strcmp(name, "MapMemory")) + return (void *)table->MapMemory; + if (!strcmp(name, "UnmapMemory")) + return (void *)table->UnmapMemory; + if (!strcmp(name, "FlushMappedMemoryRanges")) + return (void *)table->FlushMappedMemoryRanges; + if (!strcmp(name, "InvalidateMappedMemoryRanges")) + return (void 
*)table->InvalidateMappedMemoryRanges; + if (!strcmp(name, "GetDeviceMemoryCommitment")) + return (void *)table->GetDeviceMemoryCommitment; + if (!strcmp(name, "GetImageSparseMemoryRequirements")) + return (void *)table->GetImageSparseMemoryRequirements; + if (!strcmp(name, "GetBufferMemoryRequirements")) + return (void *)table->GetBufferMemoryRequirements; + if (!strcmp(name, "GetImageMemoryRequirements")) + return (void *)table->GetImageMemoryRequirements; + if (!strcmp(name, "BindBufferMemory")) + return (void *)table->BindBufferMemory; + if (!strcmp(name, "BindImageMemory")) + return (void *)table->BindImageMemory; + if (!strcmp(name, "QueueBindSparse")) + return (void *)table->QueueBindSparse; + if (!strcmp(name, "CreateFence")) + return (void *)table->CreateFence; + if (!strcmp(name, "DestroyFence")) + return (void *)table->DestroyFence; + if (!strcmp(name, "ResetFences")) + return (void *)table->ResetFences; + if (!strcmp(name, "GetFenceStatus")) + return (void *)table->GetFenceStatus; + if (!strcmp(name, "WaitForFences")) + return (void *)table->WaitForFences; + if (!strcmp(name, "CreateSemaphore")) + return (void *)table->CreateSemaphore; + if (!strcmp(name, "DestroySemaphore")) + return (void *)table->DestroySemaphore; + if (!strcmp(name, "CreateEvent")) + return (void *)table->CreateEvent; + if (!strcmp(name, "DestroyEvent")) + return (void *)table->DestroyEvent; + if (!strcmp(name, "GetEventStatus")) + return (void *)table->GetEventStatus; + if (!strcmp(name, "SetEvent")) + return (void *)table->SetEvent; + if (!strcmp(name, "ResetEvent")) + return (void *)table->ResetEvent; + if (!strcmp(name, "CreateQueryPool")) + return (void *)table->CreateQueryPool; + if (!strcmp(name, "DestroyQueryPool")) + return (void *)table->DestroyQueryPool; + if (!strcmp(name, "GetQueryPoolResults")) + return (void *)table->GetQueryPoolResults; + if (!strcmp(name, "CreateBuffer")) + return (void *)table->CreateBuffer; + if (!strcmp(name, "DestroyBuffer")) + return (void 
*)table->DestroyBuffer; + if (!strcmp(name, "CreateBufferView")) + return (void *)table->CreateBufferView; + if (!strcmp(name, "DestroyBufferView")) + return (void *)table->DestroyBufferView; + if (!strcmp(name, "CreateImage")) + return (void *)table->CreateImage; + if (!strcmp(name, "DestroyImage")) + return (void *)table->DestroyImage; + if (!strcmp(name, "GetImageSubresourceLayout")) + return (void *)table->GetImageSubresourceLayout; + if (!strcmp(name, "CreateImageView")) + return (void *)table->CreateImageView; + if (!strcmp(name, "DestroyImageView")) + return (void *)table->DestroyImageView; + if (!strcmp(name, "CreateShaderModule")) + return (void *)table->CreateShaderModule; + if (!strcmp(name, "DestroyShaderModule")) + return (void *)table->DestroyShaderModule; + if (!strcmp(name, "CreatePipelineCache")) + return (void *)table->CreatePipelineCache; + if (!strcmp(name, "DestroyPipelineCache")) + return (void *)table->DestroyPipelineCache; + if (!strcmp(name, "GetPipelineCacheData")) + return (void *)table->GetPipelineCacheData; + if (!strcmp(name, "MergePipelineCaches")) + return (void *)table->MergePipelineCaches; + if (!strcmp(name, "CreateGraphicsPipelines")) + return (void *)table->CreateGraphicsPipelines; + if (!strcmp(name, "CreateComputePipelines")) + return (void *)table->CreateComputePipelines; + if (!strcmp(name, "DestroyPipeline")) + return (void *)table->DestroyPipeline; + if (!strcmp(name, "CreatePipelineLayout")) + return (void *)table->CreatePipelineLayout; + if (!strcmp(name, "DestroyPipelineLayout")) + return (void *)table->DestroyPipelineLayout; + if (!strcmp(name, "CreateSampler")) + return (void *)table->CreateSampler; + if (!strcmp(name, "DestroySampler")) + return (void *)table->DestroySampler; + if (!strcmp(name, "CreateDescriptorSetLayout")) + return (void *)table->CreateDescriptorSetLayout; + if (!strcmp(name, "DestroyDescriptorSetLayout")) + return (void *)table->DestroyDescriptorSetLayout; + if (!strcmp(name, "CreateDescriptorPool")) + return (void 
*)table->CreateDescriptorPool; + if (!strcmp(name, "DestroyDescriptorPool")) + return (void *)table->DestroyDescriptorPool; + if (!strcmp(name, "ResetDescriptorPool")) + return (void *)table->ResetDescriptorPool; + if (!strcmp(name, "AllocateDescriptorSets")) + return (void *)table->AllocateDescriptorSets; + if (!strcmp(name, "FreeDescriptorSets")) + return (void *)table->FreeDescriptorSets; + if (!strcmp(name, "UpdateDescriptorSets")) + return (void *)table->UpdateDescriptorSets; + if (!strcmp(name, "CreateFramebuffer")) + return (void *)table->CreateFramebuffer; + if (!strcmp(name, "DestroyFramebuffer")) + return (void *)table->DestroyFramebuffer; + if (!strcmp(name, "CreateRenderPass")) + return (void *)table->CreateRenderPass; + if (!strcmp(name, "DestroyRenderPass")) + return (void *)table->DestroyRenderPass; + if (!strcmp(name, "GetRenderAreaGranularity")) + return (void *)table->GetRenderAreaGranularity; + if (!strcmp(name, "CreateCommandPool")) + return (void *)table->CreateCommandPool; + if (!strcmp(name, "DestroyCommandPool")) + return (void *)table->DestroyCommandPool; + if (!strcmp(name, "ResetCommandPool")) + return (void *)table->ResetCommandPool; + if (!strcmp(name, "AllocateCommandBuffers")) + return (void *)table->AllocateCommandBuffers; + if (!strcmp(name, "FreeCommandBuffers")) + return (void *)table->FreeCommandBuffers; + if (!strcmp(name, "BeginCommandBuffer")) + return (void *)table->BeginCommandBuffer; + if (!strcmp(name, "EndCommandBuffer")) + return (void *)table->EndCommandBuffer; + if (!strcmp(name, "ResetCommandBuffer")) + return (void *)table->ResetCommandBuffer; + if (!strcmp(name, "CmdBindPipeline")) + return (void *)table->CmdBindPipeline; + if (!strcmp(name, "CmdSetViewport")) + return (void *)table->CmdSetViewport; + if (!strcmp(name, "CmdSetScissor")) + return (void *)table->CmdSetScissor; + if (!strcmp(name, "CmdSetLineWidth")) + return (void *)table->CmdSetLineWidth; + if (!strcmp(name, "CmdSetDepthBias")) + return (void 
*)table->CmdSetDepthBias; + if (!strcmp(name, "CmdSetBlendConstants")) + return (void *)table->CmdSetBlendConstants; + if (!strcmp(name, "CmdSetDepthBounds")) + return (void *)table->CmdSetDepthBounds; + if (!strcmp(name, "CmdSetStencilCompareMask")) + return (void *)table->CmdSetStencilCompareMask; + if (!strcmp(name, "CmdSetStencilWriteMask")) + return (void *)table->CmdSetStencilWriteMask; + if (!strcmp(name, "CmdSetStencilReference")) + return (void *)table->CmdSetStencilReference; + if (!strcmp(name, "CmdBindDescriptorSets")) + return (void *)table->CmdBindDescriptorSets; + if (!strcmp(name, "CmdBindVertexBuffers")) + return (void *)table->CmdBindVertexBuffers; + if (!strcmp(name, "CmdBindIndexBuffer")) + return (void *)table->CmdBindIndexBuffer; + if (!strcmp(name, "CmdDraw")) + return (void *)table->CmdDraw; + if (!strcmp(name, "CmdDrawIndexed")) + return (void *)table->CmdDrawIndexed; + if (!strcmp(name, "CmdDrawIndirect")) + return (void *)table->CmdDrawIndirect; + if (!strcmp(name, "CmdDrawIndexedIndirect")) + return (void *)table->CmdDrawIndexedIndirect; + if (!strcmp(name, "CmdDispatch")) + return (void *)table->CmdDispatch; + if (!strcmp(name, "CmdDispatchIndirect")) + return (void *)table->CmdDispatchIndirect; + if (!strcmp(name, "CmdCopyBuffer")) + return (void *)table->CmdCopyBuffer; + if (!strcmp(name, "CmdCopyImage")) + return (void *)table->CmdCopyImage; + if (!strcmp(name, "CmdBlitImage")) + return (void *)table->CmdBlitImage; + if (!strcmp(name, "CmdCopyBufferToImage")) + return (void *)table->CmdCopyBufferToImage; + if (!strcmp(name, "CmdCopyImageToBuffer")) + return (void *)table->CmdCopyImageToBuffer; + if (!strcmp(name, "CmdUpdateBuffer")) + return (void *)table->CmdUpdateBuffer; + if (!strcmp(name, "CmdFillBuffer")) + return (void *)table->CmdFillBuffer; + if (!strcmp(name, "CmdClearColorImage")) + return (void *)table->CmdClearColorImage; + if (!strcmp(name, "CmdClearDepthStencilImage")) + return (void *)table->CmdClearDepthStencilImage; 
+ if (!strcmp(name, "CmdClearAttachments")) + return (void *)table->CmdClearAttachments; + if (!strcmp(name, "CmdResolveImage")) + return (void *)table->CmdResolveImage; + if (!strcmp(name, "CmdSetEvent")) + return (void *)table->CmdSetEvent; + if (!strcmp(name, "CmdResetEvent")) + return (void *)table->CmdResetEvent; + if (!strcmp(name, "CmdWaitEvents")) + return (void *)table->CmdWaitEvents; + if (!strcmp(name, "CmdPipelineBarrier")) + return (void *)table->CmdPipelineBarrier; + if (!strcmp(name, "CmdBeginQuery")) + return (void *)table->CmdBeginQuery; + if (!strcmp(name, "CmdEndQuery")) + return (void *)table->CmdEndQuery; + if (!strcmp(name, "CmdResetQueryPool")) + return (void *)table->CmdResetQueryPool; + if (!strcmp(name, "CmdWriteTimestamp")) + return (void *)table->CmdWriteTimestamp; + if (!strcmp(name, "CmdCopyQueryPoolResults")) + return (void *)table->CmdCopyQueryPoolResults; + if (!strcmp(name, "CmdPushConstants")) + return (void *)table->CmdPushConstants; + if (!strcmp(name, "CmdBeginRenderPass")) + return (void *)table->CmdBeginRenderPass; + if (!strcmp(name, "CmdNextSubpass")) + return (void *)table->CmdNextSubpass; + if (!strcmp(name, "CmdEndRenderPass")) + return (void *)table->CmdEndRenderPass; + if (!strcmp(name, "CmdExecuteCommands")) + return (void *)table->CmdExecuteCommands; + + return NULL; +} + +static inline void +loader_init_instance_core_dispatch_table(VkLayerInstanceDispatchTable *table, + PFN_vkGetInstanceProcAddr gpa, + VkInstance inst) { + table->GetInstanceProcAddr = + (PFN_vkGetInstanceProcAddr)gpa(inst, "vkGetInstanceProcAddr"); + table->DestroyInstance = + (PFN_vkDestroyInstance)gpa(inst, "vkDestroyInstance"); + table->EnumeratePhysicalDevices = + (PFN_vkEnumeratePhysicalDevices)gpa(inst, "vkEnumeratePhysicalDevices"); + table->GetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)gpa( + inst, "vkGetPhysicalDeviceFeatures"); + table->GetPhysicalDeviceImageFormatProperties = + 
(PFN_vkGetPhysicalDeviceImageFormatProperties)gpa( + inst, "vkGetPhysicalDeviceImageFormatProperties"); + table->GetPhysicalDeviceFormatProperties = + (PFN_vkGetPhysicalDeviceFormatProperties)gpa( + inst, "vkGetPhysicalDeviceFormatProperties"); + table->GetPhysicalDeviceSparseImageFormatProperties = + (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)gpa( + inst, "vkGetPhysicalDeviceSparseImageFormatProperties"); + table->GetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)gpa( + inst, "vkGetPhysicalDeviceProperties"); + table->GetPhysicalDeviceQueueFamilyProperties = + (PFN_vkGetPhysicalDeviceQueueFamilyProperties)gpa( + inst, "vkGetPhysicalDeviceQueueFamilyProperties"); + table->GetPhysicalDeviceMemoryProperties = + (PFN_vkGetPhysicalDeviceMemoryProperties)gpa( + inst, "vkGetPhysicalDeviceMemoryProperties"); + table->EnumerateDeviceExtensionProperties = + (PFN_vkEnumerateDeviceExtensionProperties)gpa( + inst, "vkEnumerateDeviceExtensionProperties"); + table->EnumerateDeviceLayerProperties = + (PFN_vkEnumerateDeviceLayerProperties)gpa( + inst, "vkEnumerateDeviceLayerProperties"); +} + +static inline void loader_init_instance_extension_dispatch_table( + VkLayerInstanceDispatchTable *table, PFN_vkGetInstanceProcAddr gpa, + VkInstance inst) { + table->DestroySurfaceKHR = + (PFN_vkDestroySurfaceKHR)gpa(inst, "vkDestroySurfaceKHR"); + table->CreateDebugReportCallbackEXT = + (PFN_vkCreateDebugReportCallbackEXT)gpa( + inst, "vkCreateDebugReportCallbackEXT"); + table->DestroyDebugReportCallbackEXT = + (PFN_vkDestroyDebugReportCallbackEXT)gpa( + inst, "vkDestroyDebugReportCallbackEXT"); + table->DebugReportMessageEXT = + (PFN_vkDebugReportMessageEXT)gpa(inst, "vkDebugReportMessageEXT"); + table->GetPhysicalDeviceSurfaceSupportKHR = + (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)gpa( + inst, "vkGetPhysicalDeviceSurfaceSupportKHR"); + table->GetPhysicalDeviceSurfaceCapabilitiesKHR = + (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)gpa( + inst, 
"vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); + table->GetPhysicalDeviceSurfaceFormatsKHR = + (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)gpa( + inst, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + table->GetPhysicalDeviceSurfacePresentModesKHR = + (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)gpa( + inst, "vkGetPhysicalDeviceSurfacePresentModesKHR"); +#ifdef VK_USE_PLATFORM_MIR_KHR + table->CreateMirSurfaceKHR = + (PFN_vkCreateMirSurfaceKHR)gpa(inst, "vkCreateMirSurfaceKHR"); + table->GetPhysicalDeviceMirPresentationSupportKHR = + (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)gpa( + inst, "vkGetPhysicalDeviceMirPresentationSupportKHR"); +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + table->CreateWaylandSurfaceKHR = + (PFN_vkCreateWaylandSurfaceKHR)gpa(inst, "vkCreateWaylandSurfaceKHR"); + table->GetPhysicalDeviceWaylandPresentationSupportKHR = + (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)gpa( + inst, "vkGetPhysicalDeviceWaylandPresentationSupportKHR"); +#endif +#ifdef VK_USE_PLATFORM_WIN32_KHR + table->CreateWin32SurfaceKHR = + (PFN_vkCreateWin32SurfaceKHR)gpa(inst, "vkCreateWin32SurfaceKHR"); + table->GetPhysicalDeviceWin32PresentationSupportKHR = + (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)gpa( + inst, "vkGetPhysicalDeviceWin32PresentationSupportKHR"); +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + table->CreateXcbSurfaceKHR = + (PFN_vkCreateXcbSurfaceKHR)gpa(inst, "vkCreateXcbSurfaceKHR"); + table->GetPhysicalDeviceXcbPresentationSupportKHR = + (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)gpa( + inst, "vkGetPhysicalDeviceXcbPresentationSupportKHR"); +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + table->CreateXlibSurfaceKHR = + (PFN_vkCreateXlibSurfaceKHR)gpa(inst, "vkCreateXlibSurfaceKHR"); + table->GetPhysicalDeviceXlibPresentationSupportKHR = + (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)gpa( + inst, "vkGetPhysicalDeviceXlibPresentationSupportKHR"); +#endif +} + +static inline void * +loader_lookup_instance_dispatch_table(const 
VkLayerInstanceDispatchTable *table, + const char *name) { + if (!name || name[0] != 'v' || name[1] != 'k') + return NULL; + + name += 2; + if (!strcmp(name, "DestroyInstance")) + return (void *)table->DestroyInstance; + if (!strcmp(name, "EnumeratePhysicalDevices")) + return (void *)table->EnumeratePhysicalDevices; + if (!strcmp(name, "GetPhysicalDeviceFeatures")) + return (void *)table->GetPhysicalDeviceFeatures; + if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties")) + return (void *)table->GetPhysicalDeviceImageFormatProperties; + if (!strcmp(name, "GetPhysicalDeviceFormatProperties")) + return (void *)table->GetPhysicalDeviceFormatProperties; + if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties")) + return (void *)table->GetPhysicalDeviceSparseImageFormatProperties; + if (!strcmp(name, "GetPhysicalDeviceProperties")) + return (void *)table->GetPhysicalDeviceProperties; + if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties")) + return (void *)table->GetPhysicalDeviceQueueFamilyProperties; + if (!strcmp(name, "GetPhysicalDeviceMemoryProperties")) + return (void *)table->GetPhysicalDeviceMemoryProperties; + if (!strcmp(name, "GetInstanceProcAddr")) + return (void *)table->GetInstanceProcAddr; + if (!strcmp(name, "EnumerateDeviceExtensionProperties")) + return (void *)table->EnumerateDeviceExtensionProperties; + if (!strcmp(name, "EnumerateDeviceLayerProperties")) + return (void *)table->EnumerateDeviceLayerProperties; + if (!strcmp(name, "DestroySurfaceKHR")) + return (void *)table->DestroySurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceSurfaceSupportKHR")) + return (void *)table->GetPhysicalDeviceSurfaceSupportKHR; + if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilitiesKHR")) + return (void *)table->GetPhysicalDeviceSurfaceCapabilitiesKHR; + if (!strcmp(name, "GetPhysicalDeviceSurfaceFormatsKHR")) + return (void *)table->GetPhysicalDeviceSurfaceFormatsKHR; + if (!strcmp(name, "GetPhysicalDeviceSurfacePresentModesKHR")) + return 
(void *)table->GetPhysicalDeviceSurfacePresentModesKHR; +#ifdef VK_USE_PLATFORM_MIR_KHR + if (!strcmp(name, "CreateMirSurfaceKHR")) + return (void *)table->CreateMirSurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceMirPresentationSupportKHR")) + return (void *)table->GetPhysicalDeviceMirPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + if (!strcmp(name, "CreateWaylandSurfaceKHR")) + return (void *)table->CreateWaylandSurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceWaylandPresentationSupportKHR")) + return (void *)table->GetPhysicalDeviceWaylandPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_WIN32_KHR + if (!strcmp(name, "CreateWin32SurfaceKHR")) + return (void *)table->CreateWin32SurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceWin32PresentationSupportKHR")) + return (void *)table->GetPhysicalDeviceWin32PresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + if (!strcmp(name, "CreateXcbSurfaceKHR")) + return (void *)table->CreateXcbSurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceXcbPresentationSupportKHR")) + return (void *)table->GetPhysicalDeviceXcbPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + if (!strcmp(name, "CreateXlibSurfaceKHR")) + return (void *)table->CreateXlibSurfaceKHR; + if (!strcmp(name, "GetPhysicalDeviceXlibPresentationSupportKHR")) + return (void *)table->GetPhysicalDeviceXlibPresentationSupportKHR; +#endif + if (!strcmp(name, "CreateDebugReportCallbackEXT")) + return (void *)table->CreateDebugReportCallbackEXT; + if (!strcmp(name, "DestroyDebugReportCallbackEXT")) + return (void *)table->DestroyDebugReportCallbackEXT; + if (!strcmp(name, "DebugReportMessageEXT")) + return (void *)table->DebugReportMessageEXT; + + return NULL; +} diff --git a/third_party/vulkan/loader/trampoline.c b/third_party/vulkan/loader/trampoline.c new file mode 100644 index 000000000..dfd2c0001 --- /dev/null +++ b/third_party/vulkan/loader/trampoline.c @@ -0,0 +1,1731 @@ +/* + * + * Copyright (c) 2015-2016 The 
Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * Copyright (C) 2015 Google Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Author: Courtney Goeltzenleuchter + * Author: Jon Ashburn + * Author: Tony Barbour + * Author: Chia-I Wu + */ +#define _GNU_SOURCE +#include +#include + +#include "vk_loader_platform.h" +#include "loader.h" +#include "debug_report.h" +#include "wsi.h" + +/* Trampoline entrypoints */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkInstance *pInstance) { + struct loader_instance *ptr_instance = NULL; + VkInstance created_instance = VK_NULL_HANDLE; + VkResult res = VK_ERROR_INITIALIZATION_FAILED; + VkDebugReportCallbackEXT instance_callback = VK_NULL_HANDLE; + void *pNext = (void *)pCreateInfo->pNext; + + loader_platform_thread_once(&once_init, loader_initialize); + +#if 0 + if (pAllocator) { + ptr_instance = (struct loader_instance *) pAllocator->pfnAllocation( + pAllocator->pUserData, + sizeof(struct loader_instance), + sizeof(int *), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + } else { +#endif + ptr_instance = + (struct loader_instance *)malloc(sizeof(struct loader_instance)); + //} + if (ptr_instance == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + tls_instance = ptr_instance; + loader_platform_thread_lock_mutex(&loader_lock); + memset(ptr_instance, 0, sizeof(struct loader_instance)); +#if 0 + if (pAllocator) { + ptr_instance->alloc_callbacks = *pAllocator; + } +#endif + + /* + * Look for a debug report create info structure + * and setup a callback if found. 
+ */ + while (pNext) { + if (((VkInstanceCreateInfo *)pNext)->sType == + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT) { + instance_callback = (VkDebugReportCallbackEXT)ptr_instance; + if (util_CreateDebugReportCallback(ptr_instance, pNext, NULL, + instance_callback)) { + loader_heap_free(ptr_instance, ptr_instance); + loader_platform_thread_unlock_mutex(&loader_lock); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + } + pNext = (void *)((VkInstanceCreateInfo *)pNext)->pNext; + } + + /* Due to implicit layers need to get layer list even if + * enabledLayerCount == 0 and VK_INSTANCE_LAYERS is unset. For now always + * get layer list (both instance and device) via loader_layer_scan(). */ + memset(&ptr_instance->instance_layer_list, 0, + sizeof(ptr_instance->instance_layer_list)); + memset(&ptr_instance->device_layer_list, 0, + sizeof(ptr_instance->device_layer_list)); + loader_layer_scan(ptr_instance, &ptr_instance->instance_layer_list, + &ptr_instance->device_layer_list); + + /* validate the app requested layers to be enabled */ + if (pCreateInfo->enabledLayerCount > 0) { + res = + loader_validate_layers(ptr_instance, pCreateInfo->enabledLayerCount, + pCreateInfo->ppEnabledLayerNames, + &ptr_instance->instance_layer_list); + if (res != VK_SUCCESS) { + util_DestroyDebugReportCallback(ptr_instance, instance_callback, + NULL); + loader_heap_free(ptr_instance, ptr_instance); + loader_platform_thread_unlock_mutex(&loader_lock); + return res; + } + } + + /* convert any meta layers to the actual layers makes a copy of layer name*/ + uint32_t saved_layer_count = pCreateInfo->enabledLayerCount; + char **saved_layer_names; + char **saved_layer_ptr; + saved_layer_names = + loader_stack_alloc(sizeof(char *) * pCreateInfo->enabledLayerCount); + for (uint32_t i = 0; i < saved_layer_count; i++) { + saved_layer_names[i] = (char *)pCreateInfo->ppEnabledLayerNames[i]; + } + saved_layer_ptr = (char **)pCreateInfo->ppEnabledLayerNames; + + loader_expand_layer_names( + ptr_instance, 
std_validation_str, + sizeof(std_validation_names) / sizeof(std_validation_names[0]), + std_validation_names, (uint32_t *)&pCreateInfo->enabledLayerCount, + (char ***)&pCreateInfo->ppEnabledLayerNames); + + /* Scan/discover all ICD libraries */ + memset(&ptr_instance->icd_libs, 0, sizeof(ptr_instance->icd_libs)); + loader_icd_scan(ptr_instance, &ptr_instance->icd_libs); + + /* get extensions from all ICD's, merge so no duplicates, then validate */ + loader_get_icd_loader_instance_extensions( + ptr_instance, &ptr_instance->icd_libs, &ptr_instance->ext_list); + res = loader_validate_instance_extensions( + ptr_instance, &ptr_instance->ext_list, + &ptr_instance->instance_layer_list, pCreateInfo); + if (res != VK_SUCCESS) { + loader_unexpand_inst_layer_names(ptr_instance, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->device_layer_list); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->instance_layer_list); + loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_libs); + loader_destroy_generic_list( + ptr_instance, + (struct loader_generic_list *)&ptr_instance->ext_list); + util_DestroyDebugReportCallback(ptr_instance, instance_callback, NULL); + loader_platform_thread_unlock_mutex(&loader_lock); + loader_heap_free(ptr_instance, ptr_instance); + return res; + } + + ptr_instance->disp = + loader_heap_alloc(ptr_instance, sizeof(VkLayerInstanceDispatchTable), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (ptr_instance->disp == NULL) { + loader_unexpand_inst_layer_names(ptr_instance, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->device_layer_list); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->instance_layer_list); + loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_libs); + loader_destroy_generic_list( + ptr_instance, + (struct loader_generic_list 
*)&ptr_instance->ext_list); + util_DestroyDebugReportCallback(ptr_instance, instance_callback, NULL); + loader_platform_thread_unlock_mutex(&loader_lock); + loader_heap_free(ptr_instance, ptr_instance); + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + memcpy(ptr_instance->disp, &instance_disp, sizeof(instance_disp)); + ptr_instance->next = loader.instances; + loader.instances = ptr_instance; + + /* activate any layers on instance chain */ + res = loader_enable_instance_layers(ptr_instance, pCreateInfo, + &ptr_instance->instance_layer_list); + if (res != VK_SUCCESS) { + loader_unexpand_inst_layer_names(ptr_instance, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->device_layer_list); + loader_delete_layer_properties(ptr_instance, + &ptr_instance->instance_layer_list); + loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_libs); + loader_destroy_generic_list( + ptr_instance, + (struct loader_generic_list *)&ptr_instance->ext_list); + loader.instances = ptr_instance->next; + util_DestroyDebugReportCallback(ptr_instance, instance_callback, NULL); + loader_platform_thread_unlock_mutex(&loader_lock); + loader_heap_free(ptr_instance, ptr_instance->disp); + loader_heap_free(ptr_instance, ptr_instance); + return res; + } + + created_instance = (VkInstance)ptr_instance; + res = loader_create_instance_chain(pCreateInfo, pAllocator, ptr_instance, + &created_instance); + + if (res == VK_SUCCESS) { + wsi_create_instance(ptr_instance, pCreateInfo); + debug_report_create_instance(ptr_instance, pCreateInfo); + + *pInstance = created_instance; + + /* + * Finally have the layers in place and everyone has seen + * the CreateInstance command go by. This allows the layer's + * GetInstanceProcAddr functions to return valid extension functions + * if enabled. + */ + loader_activate_instance_layer_extensions(ptr_instance, *pInstance); + } else { + // TODO: cleanup here. 
+ } + + /* Remove temporary debug_report callback */ + util_DestroyDebugReportCallback(ptr_instance, instance_callback, NULL); + loader_unexpand_inst_layer_names(ptr_instance, saved_layer_count, + saved_layer_names, saved_layer_ptr, + pCreateInfo); + loader_platform_thread_unlock_mutex(&loader_lock); + return res; +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyInstance(VkInstance instance, + const VkAllocationCallbacks *pAllocator) { + const VkLayerInstanceDispatchTable *disp; + struct loader_instance *ptr_instance = NULL; + disp = loader_get_instance_dispatch(instance); + + loader_platform_thread_lock_mutex(&loader_lock); + + /* TODO: Do we need a temporary callback here to catch cleanup issues? */ + + ptr_instance = loader_get_instance(instance); + disp->DestroyInstance(instance, pAllocator); + + loader_deactivate_instance_layers(ptr_instance); + loader_heap_free(ptr_instance, ptr_instance->disp); + loader_heap_free(ptr_instance, ptr_instance); + loader_platform_thread_unlock_mutex(&loader_lock); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, + VkPhysicalDevice *pPhysicalDevices) { + const VkLayerInstanceDispatchTable *disp; + VkResult res; + disp = loader_get_instance_dispatch(instance); + + loader_platform_thread_lock_mutex(&loader_lock); + res = disp->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, + pPhysicalDevices); + loader_platform_thread_unlock_mutex(&loader_lock); + return res; +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetPhysicalDeviceFeatures(VkPhysicalDevice gpu, + VkPhysicalDeviceFeatures *pFeatures) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(gpu); + disp->GetPhysicalDeviceFeatures(gpu, pFeatures); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice gpu, VkFormat format, + VkFormatProperties *pFormatInfo) { + const VkLayerInstanceDispatchTable 
*disp; + + disp = loader_get_instance_dispatch(gpu); + disp->GetPhysicalDeviceFormatProperties(gpu, format, pFormatInfo); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, + VkImageFormatProperties *pImageFormatProperties) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(physicalDevice); + return disp->GetPhysicalDeviceImageFormatProperties( + physicalDevice, format, type, tiling, usage, flags, + pImageFormatProperties); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetPhysicalDeviceProperties(VkPhysicalDevice gpu, + VkPhysicalDeviceProperties *pProperties) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(gpu); + disp->GetPhysicalDeviceProperties(gpu, pProperties); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice gpu, uint32_t *pQueueFamilyPropertyCount, + VkQueueFamilyProperties *pQueueProperties) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(gpu); + disp->GetPhysicalDeviceQueueFamilyProperties(gpu, pQueueFamilyPropertyCount, + pQueueProperties); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice gpu, VkPhysicalDeviceMemoryProperties *pMemoryProperties) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(gpu); + disp->GetPhysicalDeviceMemoryProperties(gpu, pMemoryProperties); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) { + VkResult res; + + loader_platform_thread_lock_mutex(&loader_lock); + + res = loader_CreateDevice(gpu, pCreateInfo, pAllocator, pDevice); + + 
loader_platform_thread_unlock_mutex(&loader_lock); + return res; +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + struct loader_device *dev; + + loader_platform_thread_lock_mutex(&loader_lock); + + struct loader_icd *icd = loader_get_icd_and_device(device, &dev); + const struct loader_instance *inst = icd->this_instance; + disp = loader_get_dispatch(device); + + disp->DestroyDevice(device, pAllocator); + dev->device = NULL; + loader_remove_logical_device(inst, icd, dev); + + loader_platform_thread_unlock_mutex(&loader_lock); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, + const char *pLayerName, + uint32_t *pPropertyCount, + VkExtensionProperties *pProperties) { + VkResult res; + + loader_platform_thread_lock_mutex(&loader_lock); + + /* If pLayerName == NULL, then querying ICD extensions, pass this call + down the instance chain which will terminate in the ICD. This allows + layers to filter the extensions coming back up the chain. + If pLayerName != NULL then get layer extensions from manifest file. 
*/ + if (pLayerName == NULL || strlen(pLayerName) == 0) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(physicalDevice); + res = disp->EnumerateDeviceExtensionProperties( + physicalDevice, NULL, pPropertyCount, pProperties); + } else { + res = loader_EnumerateDeviceExtensionProperties( + physicalDevice, pLayerName, pPropertyCount, pProperties); + } + + loader_platform_thread_unlock_mutex(&loader_lock); + return res; +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, + uint32_t *pPropertyCount, + VkLayerProperties *pProperties) { + VkResult res; + + loader_platform_thread_lock_mutex(&loader_lock); + + /* Don't dispatch this call down the instance chain, want all device layers + enumerated and instance chain may not contain all device layers */ + res = loader_EnumerateDeviceLayerProperties(physicalDevice, pPropertyCount, + pProperties); + loader_platform_thread_unlock_mutex(&loader_lock); + return res; +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetDeviceQueue(VkDevice device, uint32_t queueNodeIndex, uint32_t queueIndex, + VkQueue *pQueue) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue); + loader_set_dispatch(*pQueue, disp); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, + VkFence fence) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(queue); + + return disp->QueueSubmit(queue, submitCount, pSubmits, fence); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(queue); + + return disp->QueueWaitIdle(queue); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device) { + const VkLayerDispatchTable *disp; + + disp = 
loader_get_dispatch(device); + + return disp->DeviceWaitIdle(device); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, + const VkAllocationCallbacks *pAllocator, + VkDeviceMemory *pMemory) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkFreeMemory(VkDevice device, VkDeviceMemory mem, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->FreeMemory(device, mem, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, + VkDeviceSize size, VkFlags flags, void **ppData) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->MapMemory(device, mem, offset, size, flags, ppData); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkUnmapMemory(VkDevice device, VkDeviceMemory mem) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->UnmapMemory(device, mem); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, + const VkMappedMemoryRange *pMemoryRanges) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->FlushMappedMemoryRanges(device, memoryRangeCount, + pMemoryRanges); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, + const VkMappedMemoryRange *pMemoryRanges) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->InvalidateMappedMemoryRanges(device, memoryRangeCount, + pMemoryRanges); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory 
memory, + VkDeviceSize *pCommittedMemoryInBytes) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, + VkDeviceSize offset) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->BindBufferMemory(device, buffer, mem, offset); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, + VkDeviceSize offset) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->BindImageMemory(device, image, mem, offset); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, + VkMemoryRequirements *pMemoryRequirements) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetImageMemoryRequirements(VkDevice device, VkImage image, + VkMemoryRequirements *pMemoryRequirements) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetImageMemoryRequirements(device, image, pMemoryRequirements); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements *pSparseMemoryRequirements) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetImageSparseMemoryRequirements(device, image, + pSparseMemoryRequirementCount, + pSparseMemoryRequirements); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, + 
VkSampleCountFlagBits samples, VkImageUsageFlags usage, + VkImageTiling tiling, uint32_t *pPropertyCount, + VkSparseImageFormatProperties *pProperties) { + const VkLayerInstanceDispatchTable *disp; + + disp = loader_get_instance_dispatch(physicalDevice); + + disp->GetPhysicalDeviceSparseImageFormatProperties( + physicalDevice, format, type, samples, usage, tiling, pPropertyCount, + pProperties); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, + const VkBindSparseInfo *pBindInfo, VkFence fence) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(queue); + + return disp->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkFence *pFence) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateFence(device, pCreateInfo, pAllocator, pFence); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyFence(VkDevice device, VkFence fence, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyFence(device, fence, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->ResetFences(device, fenceCount, pFences); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetFenceStatus(VkDevice device, VkFence fence) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->GetFenceStatus(device, fence); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, + VkBool32 waitAll, uint64_t timeout) { + const VkLayerDispatchTable 
*disp; + + disp = loader_get_dispatch(device); + + return disp->WaitForFences(device, fenceCount, pFences, waitAll, timeout); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSemaphore *pSemaphore) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroySemaphore(device, semaphore, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateEvent(device, pCreateInfo, pAllocator, pEvent); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyEvent(VkDevice device, VkEvent event, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyEvent(device, event, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetEventStatus(VkDevice device, VkEvent event) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->GetEventStatus(device, event); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkSetEvent(VkDevice device, VkEvent event) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->SetEvent(device, event); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkResetEvent(VkDevice device, VkEvent event) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return 
disp->ResetEvent(device, event); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkQueryPool *pQueryPool) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyQueryPool(device, queryPool, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, + uint32_t firstQuery, uint32_t queryCount, size_t dataSize, + void *pData, VkDeviceSize stride, + VkQueryResultFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, + dataSize, pData, stride, flags); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyBuffer(VkDevice device, VkBuffer buffer, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyBuffer(device, buffer, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkBufferView *pView) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + 
return disp->CreateBufferView(device, pCreateInfo, pAllocator, pView); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyBufferView(VkDevice device, VkBufferView bufferView, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyBufferView(device, bufferView, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkImage *pImage) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateImage(device, pCreateInfo, pAllocator, pImage); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyImage(VkDevice device, VkImage image, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyImage(device, image, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetImageSubresourceLayout(VkDevice device, VkImage image, + const VkImageSubresource *pSubresource, + VkSubresourceLayout *pLayout) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetImageSubresourceLayout(device, image, pSubresource, pLayout); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkImageView *pView) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateImageView(device, pCreateInfo, pAllocator, pView); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyImageView(VkDevice device, VkImageView imageView, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyImageView(device, imageView, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL 
+vkCreateShaderModule(VkDevice device, + const VkShaderModuleCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkShaderModule *pShader) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateShaderModule(device, pCreateInfo, pAllocator, pShader); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyShaderModule(device, shaderModule, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreatePipelineCache(VkDevice device, + const VkPipelineCacheCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkPipelineCache *pPipelineCache) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreatePipelineCache(device, pCreateInfo, pAllocator, + pPipelineCache); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyPipelineCache(device, pipelineCache, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, + size_t *pDataSize, void *pData) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->GetPipelineCacheData(device, pipelineCache, pDataSize, pData); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache *pSrcCaches) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->MergePipelineCaches(device, dstCache, srcCacheCount, + pSrcCaches); +} + +LOADER_EXPORT VKAPI_ATTR 
VkResult VKAPI_CALL +vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo *pCreateInfos, + const VkAllocationCallbacks *pAllocator, + VkPipeline *pPipelines) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, + pCreateInfos, pAllocator, pPipelines); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo *pCreateInfos, + const VkAllocationCallbacks *pAllocator, + VkPipeline *pPipelines) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateComputePipelines(device, pipelineCache, createInfoCount, + pCreateInfos, pAllocator, pPipelines); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyPipeline(VkDevice device, VkPipeline pipeline, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyPipeline(device, pipeline, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreatePipelineLayout(VkDevice device, + const VkPipelineLayoutCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkPipelineLayout *pPipelineLayout) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreatePipelineLayout(device, pCreateInfo, pAllocator, + pPipelineLayout); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyPipelineLayout(device, pipelineLayout, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateSampler(VkDevice device, const 
VkSamplerCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateSampler(device, pCreateInfo, pAllocator, pSampler); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroySampler(VkDevice device, VkSampler sampler, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroySampler(device, sampler, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateDescriptorSetLayout(VkDevice device, + const VkDescriptorSetLayoutCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDescriptorSetLayout *pSetLayout) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, + pSetLayout); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyDescriptorSetLayout(VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateDescriptorPool(VkDevice device, + const VkDescriptorPoolCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkDescriptorPool *pDescriptorPool) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateDescriptorPool(device, pCreateInfo, pAllocator, + pDescriptorPool); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyDescriptorPool(device, descriptorPool, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR 
VkResult VKAPI_CALL +vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->ResetDescriptorPool(device, descriptorPool, flags); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkAllocateDescriptorSets(VkDevice device, + const VkDescriptorSetAllocateInfo *pAllocateInfo, + VkDescriptorSet *pDescriptorSets) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet *pDescriptorSets) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->FreeDescriptorSets(device, descriptorPool, descriptorSetCount, + pDescriptorSets); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, + const VkWriteDescriptorSet *pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet *pDescriptorCopies) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, + descriptorCopyCount, pDescriptorCopies); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkFramebuffer *pFramebuffer) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateFramebuffer(device, pCreateInfo, pAllocator, + pFramebuffer); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable 
*disp; + + disp = loader_get_dispatch(device); + + disp->DestroyFramebuffer(device, framebuffer, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkRenderPass *pRenderPass) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyRenderPass(device, renderPass, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, + VkExtent2D *pGranularity) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->GetRenderAreaGranularity(device, renderPass, pGranularity); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkCommandPool *pCommandPool) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->CreateCommandPool(device, pCreateInfo, pAllocator, + pCommandPool); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->DestroyCommandPool(device, commandPool, pAllocator); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkResetCommandPool(VkDevice device, VkCommandPool commandPool, + VkCommandPoolResetFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + return disp->ResetCommandPool(device, 
commandPool, flags); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkAllocateCommandBuffers(VkDevice device, + const VkCommandBufferAllocateInfo *pAllocateInfo, + VkCommandBuffer *pCommandBuffers) { + const VkLayerDispatchTable *disp; + VkResult res; + + disp = loader_get_dispatch(device); + + res = disp->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); + if (res == VK_SUCCESS) { + for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) { + if (pCommandBuffers[i]) { + loader_init_dispatch(pCommandBuffers[i], disp); + } + } + } + + return res; +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer *pCommandBuffers) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(device); + + disp->FreeCommandBuffers(device, commandPool, commandBufferCount, + pCommandBuffers); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkBeginCommandBuffer(VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo *pBeginInfo) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + return disp->BeginCommandBuffer(commandBuffer, pBeginInfo); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkEndCommandBuffer(VkCommandBuffer commandBuffer) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + return disp->EndCommandBuffer(commandBuffer); +} + +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkResetCommandBuffer(VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + return disp->ResetCommandBuffer(commandBuffer, flags); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBindPipeline(VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + 
disp->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, + uint32_t viewportCount, const VkViewport *pViewports) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetViewport(commandBuffer, firstViewport, viewportCount, + pViewports); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, + uint32_t scissorCount, const VkRect2D *pScissors) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetLineWidth(commandBuffer, lineWidth); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, + float depthBiasClamp, float depthBiasSlopeFactor) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, + depthBiasClamp, depthBiasSlopeFactor); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, + const float blendConstants[4]) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetBlendConstants(commandBuffer, blendConstants); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, + float maxDepthBounds) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds); +} + +LOADER_EXPORT VKAPI_ATTR void 
VKAPI_CALL +vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, uint32_t compareMask) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, uint32_t writeMask) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetStencilReference(VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, uint32_t reference) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetStencilReference(commandBuffer, faceMask, reference); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, + const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, + const uint32_t *pDynamicOffsets) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, + firstSet, descriptorSetCount, pDescriptorSets, + dynamicOffsetCount, pDynamicOffsets); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, + VkDeviceSize offset, VkIndexType indexType) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, + uint32_t bindingCount, const VkBuffer *pBuffers, + const VkDeviceSize 
*pOffsets) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, + pBuffers, pOffsets); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, + uint32_t instanceCount, uint32_t firstVertex, + uint32_t firstInstance) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, + firstInstance); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, + uint32_t instanceCount, uint32_t firstIndex, + int32_t vertexOffset, uint32_t firstInstance) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, + vertexOffset, firstInstance); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, + VkDeviceSize offset, uint32_t drawCount, uint32_t stride) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, + VkDeviceSize offset, uint32_t drawCount, + uint32_t stride) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, + stride); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, + uint32_t z) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDispatch(commandBuffer, x, y, z); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL 
+vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, + VkDeviceSize offset) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdDispatchIndirect(commandBuffer, buffer, offset); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, + VkBuffer dstBuffer, uint32_t regionCount, + const VkBufferCopy *pRegions) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, + pRegions); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, + VkImageLayout srcImageLayout, VkImage dstImage, + VkImageLayout dstImageLayout, uint32_t regionCount, + const VkImageCopy *pRegions) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, + dstImageLayout, regionCount, pRegions); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, + VkImageLayout srcImageLayout, VkImage dstImage, + VkImageLayout dstImageLayout, uint32_t regionCount, + const VkImageBlit *pRegions, VkFilter filter) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, + dstImageLayout, regionCount, pRegions, filter); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, + VkImage dstImage, VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy *pRegions) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, + dstImageLayout, regionCount, pRegions); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL 
+vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, + VkImageLayout srcImageLayout, VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy *pRegions) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, + dstBuffer, regionCount, pRegions); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, + VkDeviceSize dstOffset, VkDeviceSize dataSize, + const uint32_t *pData) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, + VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, + VkImageLayout imageLayout, const VkClearColorValue *pColor, + uint32_t rangeCount, + const VkImageSubresourceRange *pRanges) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, + rangeCount, pRanges); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue *pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange *pRanges) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, + pDepthStencil, rangeCount, pRanges); +} + +LOADER_EXPORT VKAPI_ATTR void 
VKAPI_CALL +vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, + const VkClearAttachment *pAttachments, uint32_t rectCount, + const VkClearRect *pRects) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, + rectCount, pRects); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, + VkImageLayout srcImageLayout, VkImage dstImage, + VkImageLayout dstImageLayout, uint32_t regionCount, + const VkImageResolve *pRegions) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, + dstImageLayout, regionCount, pRegions); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, + VkPipelineStageFlags stageMask) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdSetEvent(commandBuffer, event, stageMask); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, + VkPipelineStageFlags stageMask) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdResetEvent(commandBuffer, event, stageMask); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, + const VkEvent *pEvents, VkPipelineStageFlags sourceStageMask, + VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, + const VkMemoryBarrier *pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier *pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier *pImageMemoryBarriers) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdWaitEvents(commandBuffer, eventCount, 
pEvents, sourceStageMask, + dstStageMask, memoryBarrierCount, pMemoryBarriers, + bufferMemoryBarrierCount, pBufferMemoryBarriers, + imageMemoryBarrierCount, pImageMemoryBarriers); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier *pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier *pImageMemoryBarriers) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdPipelineBarrier( + commandBuffer, srcStageMask, dstStageMask, dependencyFlags, + memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, + pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, + uint32_t slot, VkFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBeginQuery(commandBuffer, queryPool, slot, flags); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, + uint32_t slot) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdEndQuery(commandBuffer, queryPool, slot); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, + uint32_t firstQuery, uint32_t queryCount) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + 
VkQueryPool queryPool, uint32_t slot) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, + uint32_t firstQuery, uint32_t queryCount, + VkBuffer dstBuffer, VkDeviceSize dstOffset, + VkDeviceSize stride, VkFlags flags) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, + queryCount, dstBuffer, dstOffset, stride, + flags); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, + VkShaderStageFlags stageFlags, uint32_t offset, + uint32_t size, const void *pValues) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, + pValues); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo *pRenderPassBegin, + VkSubpassContents contents) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdNextSubpass(commandBuffer, contents); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdEndRenderPass(VkCommandBuffer commandBuffer) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdEndRenderPass(commandBuffer); +} + +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkCmdExecuteCommands(VkCommandBuffer commandBuffer, + uint32_t 
commandBuffersCount, + const VkCommandBuffer *pCommandBuffers) { + const VkLayerDispatchTable *disp; + + disp = loader_get_dispatch(commandBuffer); + + disp->CmdExecuteCommands(commandBuffer, commandBuffersCount, + pCommandBuffers); +} diff --git a/third_party/vulkan/loader/vk_loader_platform.h b/third_party/vulkan/loader/vk_loader_platform.h new file mode 100644 index 000000000..5fcc74023 --- /dev/null +++ b/third_party/vulkan/loader/vk_loader_platform.h @@ -0,0 +1,449 @@ +/* + * + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ *
+ * Author: Ian Elliot
+ * Author: Jon Ashburn
+ *
+ */
+#pragma once
+
+#if defined(_WIN32)
+// WinSock2.h must be included *BEFORE* windows.h
+#include <winsock2.h>
+#endif // _WIN32
+
+#include "vulkan/vk_platform.h"
+#include "vulkan/vk_sdk_platform.h"
+
+#if defined(__linux__)
+/* Linux-specific common code: */
+
+// Headers:
+//#define _GNU_SOURCE 1
+// TBD: Are the contents of the following file used?
+#include <unistd.h>
+// Note: The following file is for dynamic loading:
+#include <dlfcn.h>
+#include <pthread.h>
+#include <assert.h>
+#include <string.h>
+#include <stdbool.h>
+#include <stdlib.h>
+#include <libgen.h>
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPERATOR ':'
+#define DIRECTORY_SYMBOL '/'
+
+#define VULKAN_ICDCONF_DIR \
+    "/"                    \
+    "vulkan"               \
+    "/"                    \
+    "icd.d"
+#define VULKAN_ICD_DIR \
+    "/"                \
+    "vulkan"           \
+    "/"                \
+    "icd"
+#define VULKAN_ELAYERCONF_DIR \
+    "/"                       \
+    "vulkan"                  \
+    "/"                       \
+    "explicit_layer.d"
+#define VULKAN_ILAYERCONF_DIR \
+    "/"                       \
+    "vulkan"                  \
+    "/"                       \
+    "implicit_layer.d"
+#define VULKAN_LAYER_DIR \
+    "/"                  \
+    "vulkan"             \
+    "/"                  \
+    "layer"
+
+#if defined(LOCALPREFIX)
+#define LOCAL_DRIVERS_INFO \
+    LOCALPREFIX "/" SYSCONFDIR VULKAN_ICDCONF_DIR ":" LOCALPREFIX \
+    "/" DATADIR VULKAN_ICDCONF_DIR ":"
+#define LOCAL_ELAYERS_INFO \
+    LOCALPREFIX "/" SYSCONFDIR VULKAN_ELAYERCONF_DIR ":" LOCALPREFIX \
+    "/" DATADIR VULKAN_ELAYERCONF_DIR ":"
+#define LOCAL_ILAYERS_INFO \
+    LOCALPREFIX "/" SYSCONFDIR VULKAN_ILAYERCONF_DIR ":" LOCALPREFIX \
+    "/" DATADIR VULKAN_ILAYERCONF_DIR ":"
+#else
+#define LOCAL_DRIVERS_INFO
+#define LOCAL_ELAYERS_INFO
+#define LOCAL_ILAYERS_INFO
+#endif
+
+#define DEFAULT_VK_DRIVERS_INFO \
+    LOCAL_DRIVERS_INFO          \
+    "/" SYSCONFDIR VULKAN_ICDCONF_DIR ":" \
+    "/usr/" DATADIR VULKAN_ICDCONF_DIR
+#define DEFAULT_VK_DRIVERS_PATH ""
+#define DEFAULT_VK_ELAYERS_INFO \
+    LOCAL_ELAYERS_INFO          \
+    "/" SYSCONFDIR VULKAN_ELAYERCONF_DIR ":" \
+    "/usr/" DATADIR VULKAN_ELAYERCONF_DIR ":"
+#define DEFAULT_VK_ILAYERS_INFO \
+    LOCAL_ILAYERS_INFO          \
+    "/" SYSCONFDIR VULKAN_ILAYERCONF_DIR ":" \
+    "/usr/" DATADIR
VULKAN_ILAYERCONF_DIR +#define DEFAULT_VK_LAYERS_PATH "" +#define LAYERS_PATH_ENV "VK_LAYER_PATH" + +// C99: +#define PRINTF_SIZE_T_SPECIFIER "%zu" + +// File IO +static inline bool loader_platform_file_exists(const char *path) { + if (access(path, F_OK)) + return false; + else + return true; +} + +static inline bool loader_platform_is_path_absolute(const char *path) { + if (path[0] == '/') + return true; + else + return false; +} + +static inline char *loader_platform_dirname(char *path) { + return dirname(path); +} + +// Environment variables + +static inline char *loader_getenv(const char *name) { return getenv(name); } + +static inline void loader_free_getenv(const char *val) {} + +// Dynamic Loading of libraries: +typedef void *loader_platform_dl_handle; +static inline loader_platform_dl_handle +loader_platform_open_library(const char *libPath) { + return dlopen(libPath, RTLD_LAZY | RTLD_LOCAL); +} +static inline const char * +loader_platform_open_library_error(const char *libPath) { + return dlerror(); +} +static inline void +loader_platform_close_library(loader_platform_dl_handle library) { + dlclose(library); +} +static inline void * +loader_platform_get_proc_address(loader_platform_dl_handle library, + const char *name) { + assert(library); + assert(name); + return dlsym(library, name); +} +static inline const char * +loader_platform_get_proc_address_error(const char *name) { + return dlerror(); +} + +// Threads: +typedef pthread_t loader_platform_thread; +#define THREAD_LOCAL_DECL __thread +#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) \ + pthread_once_t var = PTHREAD_ONCE_INIT; +#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) pthread_once_t var; +static inline void loader_platform_thread_once(pthread_once_t *ctl, + void (*func)(void)) { + assert(func != NULL); + assert(ctl != NULL); + pthread_once(ctl, func); +} + +// Thread IDs: +typedef pthread_t loader_platform_thread_id; +static inline loader_platform_thread_id 
loader_platform_get_thread_id() { + return pthread_self(); +} + +// Thread mutex: +typedef pthread_mutex_t loader_platform_thread_mutex; +static inline void +loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { + pthread_mutex_init(pMutex, NULL); +} +static inline void +loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { + pthread_mutex_lock(pMutex); +} +static inline void +loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { + pthread_mutex_unlock(pMutex); +} +static inline void +loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { + pthread_mutex_destroy(pMutex); +} +typedef pthread_cond_t loader_platform_thread_cond; +static inline void +loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { + pthread_cond_init(pCond, NULL); +} +static inline void +loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, + loader_platform_thread_mutex *pMutex) { + pthread_cond_wait(pCond, pMutex); +} +static inline void +loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { + pthread_cond_broadcast(pCond); +} + +#define loader_stack_alloc(size) alloca(size) + +#elif defined(_WIN32) // defined(__linux__) +/* Windows-specific common code: */ +// WinBase.h defines CreateSemaphore and synchapi.h defines CreateEvent +// undefine them to avoid conflicts with VkLayerDispatchTable struct members. 
+#ifdef CreateSemaphore
+#undef CreateSemaphore
+#endif
+#ifdef CreateEvent
+#undef CreateEvent
+#endif
+#include <assert.h>
+#include <io.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <string.h>
+#include <shlwapi.h>
+#ifdef __cplusplus
+#include <iostream>
+#include <string>
+using namespace std;
+#endif // __cplusplus
+
+// VK Library Filenames, Paths, etc.:
+#define PATH_SEPERATOR ';'
+#define DIRECTORY_SYMBOL '\\'
+#define DEFAULT_VK_REGISTRY_HIVE HKEY_LOCAL_MACHINE
+#define DEFAULT_VK_DRIVERS_INFO "SOFTWARE\\Khronos\\Vulkan\\Drivers"
+// TODO: Are these the correct paths
+#define DEFAULT_VK_DRIVERS_PATH "C:\\Windows\\System32;C:\\Windows\\SysWow64"
+#define DEFAULT_VK_ELAYERS_INFO "SOFTWARE\\Khronos\\Vulkan\\ExplicitLayers"
+#define DEFAULT_VK_ILAYERS_INFO "SOFTWARE\\Khronos\\Vulkan\\ImplicitLayers"
+#define DEFAULT_VK_LAYERS_PATH "C:\\Windows\\System32;C:\\Windows\\SysWow64"
+#define LAYERS_PATH_ENV "VK_LAYER_PATH"
+
+#define PRINTF_SIZE_T_SPECIFIER "%Iu"
+
+// File IO
+static bool loader_platform_file_exists(const char *path) {
+    if ((_access(path, 0)) == -1)
+        return false;
+    else
+        return true;
+}
+
+static bool loader_platform_is_path_absolute(const char *path) {
+    return !PathIsRelative(path);
+}
+
+// WIN32 runtime doesn't have dirname().
+static inline char *loader_platform_dirname(char *path) {
+    char *current, *next;
+
+    // TODO/TBD: Do we need to deal with the Windows's ":" character?
+
+    for (current = path; *current != '\0'; current = next) {
+        next = strchr(current, DIRECTORY_SYMBOL);
+        if (next == NULL) {
+            if (current != path)
+                *(current - 1) = '\0';
+            return path;
+        } else {
+            // Point one character past the DIRECTORY_SYMBOL:
+            next++;
+        }
+    }
+    return path;
+}
+
+// WIN32 runtime doesn't have basename().
+// Microsoft also doesn't have basename(). Paths are different on Windows, and
+// so this is just a temporary solution in order to get us compiling, so that we
+// can test some scenarios, and develop the correct solution for Windows.
+// TODO: Develop a better, permanent solution for Windows, to replace this +// temporary code: +static char *loader_platform_basename(char *pathname) { + char *current, *next; + + // TODO/TBD: Do we need to deal with the Windows's ":" character? + + for (current = pathname; *current != '\0'; current = next) { + next = strchr(current, DIRECTORY_SYMBOL); + if (next == NULL) { + // No more DIRECTORY_SYMBOL's so return p: + return current; + } else { + // Point one character past the DIRECTORY_SYMBOL: + next++; + } + } + // We shouldn't get to here, but this makes the compiler happy: + return current; +} + +// Environment variables + +static inline char *loader_getenv(const char *name) { + char *retVal; + DWORD valSize; + + valSize = GetEnvironmentVariableA(name, NULL, 0); + + // valSize DOES include the null terminator, so for any set variable + // will always be at least 1. If it's 0, the variable wasn't set. + if (valSize == 0) + return NULL; + + // TODO; FIXME This should be using any app defined memory allocation + retVal = (char *)malloc(valSize); + + GetEnvironmentVariableA(name, retVal, valSize); + + return retVal; +} + +static inline void loader_free_getenv(const char *val) { free((void *)val); } + +// Dynamic Loading: +typedef HMODULE loader_platform_dl_handle; +static loader_platform_dl_handle +loader_platform_open_library(const char *libPath) { + return LoadLibrary(libPath); +} +static char *loader_platform_open_library_error(const char *libPath) { + static char errorMsg[120]; + snprintf(errorMsg, 119, "Failed to open dynamic library \"%s\"", libPath); + return errorMsg; +} +static void loader_platform_close_library(loader_platform_dl_handle library) { + FreeLibrary(library); +} +static void *loader_platform_get_proc_address(loader_platform_dl_handle library, + const char *name) { + assert(library); + assert(name); + return GetProcAddress(library, name); +} +static char *loader_platform_get_proc_address_error(const char *name) { + static char 
errorMsg[120]; + snprintf(errorMsg, 119, "Failed to find function \"%s\" in dynamic library", + name); + return errorMsg; +} + +// Threads: +typedef HANDLE loader_platform_thread; +#define THREAD_LOCAL_DECL __declspec(thread) +#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) \ + INIT_ONCE var = INIT_ONCE_STATIC_INIT; +#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var; +static BOOL CALLBACK +InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) { + void (*func)(void) = (void (*)(void))Parameter; + func(); + return TRUE; +} + +static void loader_platform_thread_once(void *ctl, void (*func)(void)) { + assert(func != NULL); + assert(ctl != NULL); + InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, func, NULL); +} + +// Thread IDs: +typedef DWORD loader_platform_thread_id; +static loader_platform_thread_id loader_platform_get_thread_id() { + return GetCurrentThreadId(); +} + +// Thread mutex: +typedef CRITICAL_SECTION loader_platform_thread_mutex; +static void +loader_platform_thread_create_mutex(loader_platform_thread_mutex *pMutex) { + InitializeCriticalSection(pMutex); +} +static void +loader_platform_thread_lock_mutex(loader_platform_thread_mutex *pMutex) { + EnterCriticalSection(pMutex); +} +static void +loader_platform_thread_unlock_mutex(loader_platform_thread_mutex *pMutex) { + LeaveCriticalSection(pMutex); +} +static void +loader_platform_thread_delete_mutex(loader_platform_thread_mutex *pMutex) { + DeleteCriticalSection(pMutex); +} +typedef CONDITION_VARIABLE loader_platform_thread_cond; +static void +loader_platform_thread_init_cond(loader_platform_thread_cond *pCond) { + InitializeConditionVariable(pCond); +} +static void +loader_platform_thread_cond_wait(loader_platform_thread_cond *pCond, + loader_platform_thread_mutex *pMutex) { + SleepConditionVariableCS(pCond, pMutex, INFINITE); +} +static void +loader_platform_thread_cond_broadcast(loader_platform_thread_cond *pCond) { + WakeAllConditionVariable(pCond); +} + +// 
Windows Registry: +char *loader_get_registry_string(const HKEY hive, const LPCTSTR sub_key, + const char *value); + +#define loader_stack_alloc(size) _alloca(size) +#else // defined(_WIN32) + +#error The "loader_platform.h" file must be modified for this OS. + +// NOTE: In order to support another OS, an #elif needs to be added (above the +// "#else // defined(_WIN32)") for that OS, and OS-specific versions of the +// contents of this file must be created. + +// NOTE: Other OS-specific changes are also needed for this OS. Search for +// files with "WIN32" in it, as a quick way to find files that must be changed. + +#endif // defined(_WIN32) + +// returns true if the given string appears to be a relative or absolute +// path, as opposed to a bare filename. +static inline bool loader_platform_is_path(const char *path) { + return strchr(path, DIRECTORY_SYMBOL) != NULL; +} diff --git a/third_party/vulkan/loader/wsi.c b/third_party/vulkan/loader/wsi.c new file mode 100644 index 000000000..05945fb50 --- /dev/null +++ b/third_party/vulkan/loader/wsi.c @@ -0,0 +1,1092 @@ +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. 
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ *
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+ * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE
+ * USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ * Author: Ian Elliott
+ * Author: Jon Ashburn
+ * Author: Ian Elliott
+ * Author: Mark Lobodzinski
+ */
+
+//#define _ISOC11_SOURCE /* for aligned_alloc() */
+#define _GNU_SOURCE
+#include <stdlib.h>
+#include <string.h>
+#include "vk_loader_platform.h"
+#include "loader.h"
+#include "wsi.h"
+#include <vulkan/vk_icd.h>
+
+static const VkExtensionProperties wsi_surface_extension_info = {
+    .extensionName = VK_KHR_SURFACE_EXTENSION_NAME,
+    .specVersion = VK_KHR_SURFACE_SPEC_VERSION,
+};
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+static const VkExtensionProperties wsi_win32_surface_extension_info = {
+    .extensionName = VK_KHR_WIN32_SURFACE_EXTENSION_NAME,
+    .specVersion = VK_KHR_WIN32_SURFACE_SPEC_VERSION,
+};
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+static const VkExtensionProperties wsi_mir_surface_extension_info = {
+    .extensionName = VK_KHR_MIR_SURFACE_EXTENSION_NAME,
+    .specVersion = VK_KHR_MIR_SURFACE_SPEC_VERSION,
+};
+#endif // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+static const VkExtensionProperties wsi_wayland_surface_extension_info = {
+    .extensionName = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,
+    .specVersion = VK_KHR_WAYLAND_SURFACE_SPEC_VERSION,
+};
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+static const VkExtensionProperties wsi_xcb_surface_extension_info = {
+    .extensionName = VK_KHR_XCB_SURFACE_EXTENSION_NAME,
+    .specVersion = VK_KHR_XCB_SURFACE_SPEC_VERSION,
+};
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef
VK_USE_PLATFORM_XLIB_KHR +static const VkExtensionProperties wsi_xlib_surface_extension_info = { + .extensionName = VK_KHR_XLIB_SURFACE_EXTENSION_NAME, + .specVersion = VK_KHR_XLIB_SURFACE_SPEC_VERSION, +}; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +static const VkExtensionProperties wsi_android_surface_extension_info = { + .extensionName = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME, + .specVersion = VK_KHR_ANDROID_SURFACE_REVISION, +}; +#endif // VK_USE_PLATFORM_ANDROID_KHR + +void wsi_add_instance_extensions(const struct loader_instance *inst, + struct loader_extension_list *ext_list) { + loader_add_to_ext_list(inst, ext_list, 1, &wsi_surface_extension_info); +#ifdef VK_USE_PLATFORM_WIN32_KHR + loader_add_to_ext_list(inst, ext_list, 1, + &wsi_win32_surface_extension_info); +#endif // VK_USE_PLATFORM_WIN32_KHR +#ifdef VK_USE_PLATFORM_MIR_KHR + loader_add_to_ext_list(inst, ext_list, 1, &wsi_mir_surface_extension_info); +#endif // VK_USE_PLATFORM_MIR_KHR +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + loader_add_to_ext_list(inst, ext_list, 1, + &wsi_wayland_surface_extension_info); +#endif // VK_USE_PLATFORM_WAYLAND_KHR +#ifdef VK_USE_PLATFORM_XCB_KHR + loader_add_to_ext_list(inst, ext_list, 1, &wsi_xcb_surface_extension_info); +#endif // VK_USE_PLATFORM_XCB_KHR +#ifdef VK_USE_PLATFORM_XLIB_KHR + loader_add_to_ext_list(inst, ext_list, 1, &wsi_xlib_surface_extension_info); +#endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_ANDROID_KHR + loader_add_to_ext_list(inst, ext_list, 1, + &wsi_android_surface_extension_info); +#endif // VK_USE_PLATFORM_ANDROID_KHR +} + +void wsi_create_instance(struct loader_instance *ptr_instance, + const VkInstanceCreateInfo *pCreateInfo) { + ptr_instance->wsi_surface_enabled = false; + +#ifdef VK_USE_PLATFORM_WIN32_KHR + ptr_instance->wsi_win32_surface_enabled = true; +#endif // VK_USE_PLATFORM_WIN32_KHR +#ifdef VK_USE_PLATFORM_MIR_KHR + ptr_instance->wsi_mir_surface_enabled = false; +#endif // 
VK_USE_PLATFORM_MIR_KHR +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + ptr_instance->wsi_wayland_surface_enabled = false; +#endif // VK_USE_PLATFORM_WAYLAND_KHR +#ifdef VK_USE_PLATFORM_XCB_KHR + ptr_instance->wsi_xcb_surface_enabled = false; +#endif // VK_USE_PLATFORM_XCB_KHR +#ifdef VK_USE_PLATFORM_XLIB_KHR + ptr_instance->wsi_xlib_surface_enabled = false; +#endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_ANDROID_KHR + ptr_instance->wsi_android_surface_enabled = false; +#endif // VK_USE_PLATFORM_ANDROID_KHR + + for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) { + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_surface_enabled = true; + continue; + } +#ifdef VK_USE_PLATFORM_WIN32_KHR + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_win32_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_WIN32_KHR +#ifdef VK_USE_PLATFORM_MIR_KHR + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_mir_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_MIR_KHR +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_wayland_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_WAYLAND_KHR +#ifdef VK_USE_PLATFORM_XCB_KHR + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_xcb_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_XCB_KHR +#ifdef VK_USE_PLATFORM_XLIB_KHR + if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_xlib_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_ANDROID_KHR + if 
(strcmp(pCreateInfo->ppEnabledExtensionNames[i], + VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) { + ptr_instance->wsi_android_surface_enabled = true; + continue; + } +#endif // VK_USE_PLATFORM_ANDROID_KHR + } +} + +/* + * Functions for the VK_KHR_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for DestroySurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, + const VkAllocationCallbacks *pAllocator) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + disp->DestroySurfaceKHR(instance, surface, pAllocator); +} + +/* + * This is the instance chain terminator function + * for DestroySurfaceKHR + */ +VKAPI_ATTR void VKAPI_CALL +loader_DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, + const VkAllocationCallbacks *pAllocator) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + + loader_heap_free(ptr_instance, (void *)surface); +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceSurfaceSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 *pSupported) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkResult res = disp->GetPhysicalDeviceSurfaceSupportKHR( + physicalDevice, queueFamilyIndex, surface, pSupported); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceSurfaceSupportKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 *pSupported) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert(pSupported && + 
"GetPhysicalDeviceSurfaceSupportKHR: Error, null pSupported"); + *pSupported = false; + + assert(icd->GetPhysicalDeviceSurfaceSupportKHR && + "loader: null GetPhysicalDeviceSurfaceSupportKHR ICD pointer"); + + return icd->GetPhysicalDeviceSurfaceSupportKHR( + phys_dev->phys_dev, queueFamilyIndex, surface, pSupported); +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceSurfaceCapabilitiesKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkResult res = disp->GetPhysicalDeviceSurfaceCapabilitiesKHR( + physicalDevice, surface, pSurfaceCapabilities); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceSurfaceCapabilitiesKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL loader_GetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert(pSurfaceCapabilities && "GetPhysicalDeviceSurfaceCapabilitiesKHR: " + "Error, null pSurfaceCapabilities"); + + assert(icd->GetPhysicalDeviceSurfaceCapabilitiesKHR && + "loader: null GetPhysicalDeviceSurfaceCapabilitiesKHR ICD pointer"); + + return icd->GetPhysicalDeviceSurfaceCapabilitiesKHR( + phys_dev->phys_dev, surface, pSurfaceCapabilities); +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceSurfaceFormatsKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t *pSurfaceFormatCount, + VkSurfaceFormatKHR *pSurfaceFormats) { + const VkLayerInstanceDispatchTable 
*disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkResult res = disp->GetPhysicalDeviceSurfaceFormatsKHR( + physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceSurfaceFormatsKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t *pSurfaceFormatCount, + VkSurfaceFormatKHR *pSurfaceFormats) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert( + pSurfaceFormatCount && + "GetPhysicalDeviceSurfaceFormatsKHR: Error, null pSurfaceFormatCount"); + + assert(icd->GetPhysicalDeviceSurfaceFormatsKHR && + "loader: null GetPhysicalDeviceSurfaceFormatsKHR ICD pointer"); + + return icd->GetPhysicalDeviceSurfaceFormatsKHR( + phys_dev->phys_dev, surface, pSurfaceFormatCount, pSurfaceFormats); +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceSurfacePresentModesKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t *pPresentModeCount, + VkPresentModeKHR *pPresentModes) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkResult res = disp->GetPhysicalDeviceSurfacePresentModesKHR( + physicalDevice, surface, pPresentModeCount, pPresentModes); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceSurfacePresentModesKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL loader_GetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, + uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd 
= phys_dev->this_icd; + + assert(pPresentModeCount && "GetPhysicalDeviceSurfacePresentModesKHR: " + "Error, null pPresentModeCount"); + + assert(icd->GetPhysicalDeviceSurfacePresentModesKHR && + "loader: null GetPhysicalDeviceSurfacePresentModesKHR ICD pointer"); + + return icd->GetPhysicalDeviceSurfacePresentModesKHR( + phys_dev->phys_dev, surface, pPresentModeCount, pPresentModes); +} + +/* + * Functions for the VK_KHR_swapchain extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateSwapchainKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateSwapchainKHR(VkDevice device, + const VkSwapchainCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSwapchainKHR *pSwapchain) { + const VkLayerDispatchTable *disp; + disp = loader_get_dispatch(device); + VkResult res = + disp->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain); + return res; +} + +/* + * This is the trampoline entrypoint + * for DestroySwapchainKHR + */ +LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL +vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, + const VkAllocationCallbacks *pAllocator) { + const VkLayerDispatchTable *disp; + disp = loader_get_dispatch(device); + disp->DestroySwapchainKHR(device, swapchain, pAllocator); +} + +/* + * This is the trampoline entrypoint + * for GetSwapchainImagesKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, + uint32_t *pSwapchainImageCount, + VkImage *pSwapchainImages) { + const VkLayerDispatchTable *disp; + disp = loader_get_dispatch(device); + VkResult res = disp->GetSwapchainImagesKHR( + device, swapchain, pSwapchainImageCount, pSwapchainImages); + return res; +} + +/* + * This is the trampoline entrypoint + * for AcquireNextImageKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, + uint64_t timeout, VkSemaphore semaphore, VkFence fence, + uint32_t 
*pImageIndex) { + const VkLayerDispatchTable *disp; + disp = loader_get_dispatch(device); + VkResult res = disp->AcquireNextImageKHR(device, swapchain, timeout, + semaphore, fence, pImageIndex); + return res; +} + +/* + * This is the trampoline entrypoint + * for QueuePresentKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) { + const VkLayerDispatchTable *disp; + disp = loader_get_dispatch(queue); + VkResult res = disp->QueuePresentKHR(queue, pPresentInfo); + return res; +} + +#ifdef VK_USE_PLATFORM_WIN32_KHR + +/* + * Functions for the VK_KHR_win32_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateWin32SurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateWin32SurfaceKHR(VkInstance instance, + const VkWin32SurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = disp->CreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator, + pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateWin32SurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateWin32SurfaceKHR(VkInstance instance, + const VkWin32SurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceWin32 *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceWin32), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_WIN32; + pIcdSurface->hinstance = pCreateInfo->hinstance; + pIcdSurface->hwnd = pCreateInfo->hwnd; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +/* + * This is 
the trampoline entrypoint + * for GetPhysicalDeviceWin32PresentationSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL +vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceWin32PresentationSupportKHR( + physicalDevice, queueFamilyIndex); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceWin32PresentationSupportKHR + */ +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert(icd->GetPhysicalDeviceWin32PresentationSupportKHR && + "loader: null GetPhysicalDeviceWin32PresentationSupportKHR ICD " + "pointer"); + + return icd->GetPhysicalDeviceWin32PresentationSupportKHR(phys_dev->phys_dev, + queueFamilyIndex); +} +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_MIR_KHR + +/* + * Functions for the VK_KHR_mir_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateMirSurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateMirSurfaceKHR(VkInstance instance, + const VkMirSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = + disp->CreateMirSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateMirSurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateMirSurfaceKHR(VkInstance instance, + const VkMirSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks 
*pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceMir *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceMir), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_MIR; + pIcdSurface->connection = pCreateInfo->connection; + pIcdSurface->mirSurface = pCreateInfo->mirSurface; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceMirPresentationSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL +vkGetPhysicalDeviceMirPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + MirConnection *connection) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceMirPresentationSupportKHR( + physicalDevice, queueFamilyIndex, connection); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceMirPresentationSupportKHR + */ +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceMirPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + MirConnection *connection) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert( + icd->GetPhysicalDeviceMirPresentationSupportKHR && + "loader: null GetPhysicalDeviceMirPresentationSupportKHR ICD pointer"); + + return icd->GetPhysicalDeviceMirPresentationSupportKHR( + phys_dev->phys_dev, queueFamilyIndex, connection); +} +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + +/* + * Functions for the VK_KHR_wayland_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateWaylandSurfaceKHR + */ 
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateWaylandSurfaceKHR(VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = disp->CreateWaylandSurfaceKHR(instance, pCreateInfo, pAllocator, + pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateWaylandSurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateWaylandSurfaceKHR(VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceWayland *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceWayland), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND; + pIcdSurface->display = pCreateInfo->display; + pIcdSurface->surface = pCreateInfo->surface; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceWaylandPresentationSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL +vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + struct wl_display *display) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceWaylandPresentationSupportKHR( + physicalDevice, queueFamilyIndex, display); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceWaylandPresentationSupportKHR + */ +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceWaylandPresentationSupportKHR( + 
VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + struct wl_display *display) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert(icd->GetPhysicalDeviceWaylandPresentationSupportKHR && + "loader: null GetPhysicalDeviceWaylandPresentationSupportKHR ICD " + "pointer"); + + return icd->GetPhysicalDeviceWaylandPresentationSupportKHR( + phys_dev->phys_dev, queueFamilyIndex, display); +} +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR + +/* + * Functions for the VK_KHR_xcb_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateXcbSurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateXcbSurfaceKHR(VkInstance instance, + const VkXcbSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = + disp->CreateXcbSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateXcbSurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateXcbSurfaceKHR(VkInstance instance, + const VkXcbSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceXcb *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceXcb), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_XCB; + pIcdSurface->connection = pCreateInfo->connection; + pIcdSurface->window = pCreateInfo->window; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +/* + * This is the trampoline entrypoint + * for 
GetPhysicalDeviceXcbPresentationSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL +vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t *connection, + xcb_visualid_t visual_id) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceXcbPresentationSupportKHR( + physicalDevice, queueFamilyIndex, connection, visual_id); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceXcbPresentationSupportKHR + */ +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + xcb_connection_t *connection, xcb_visualid_t visual_id) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert( + icd->GetPhysicalDeviceXcbPresentationSupportKHR && + "loader: null GetPhysicalDeviceXcbPresentationSupportKHR ICD pointer"); + + return icd->GetPhysicalDeviceXcbPresentationSupportKHR( + phys_dev->phys_dev, queueFamilyIndex, connection, visual_id); +} +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR + +/* + * Functions for the VK_KHR_xlib_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateXlibSurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateXlibSurfaceKHR(VkInstance instance, + const VkXlibSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = + disp->CreateXlibSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateXlibSurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL 
+loader_CreateXlibSurfaceKHR(VkInstance instance, + const VkXlibSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceXlib *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceXlib), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_XLIB; + pIcdSurface->dpy = pCreateInfo->dpy; + pIcdSurface->window = pCreateInfo->window; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +/* + * This is the trampoline entrypoint + * for GetPhysicalDeviceXlibPresentationSupportKHR + */ +LOADER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL +vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display *dpy, VisualID visualID) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(physicalDevice); + VkBool32 res = disp->GetPhysicalDeviceXlibPresentationSupportKHR( + physicalDevice, queueFamilyIndex, dpy, visualID); + return res; +} + +/* + * This is the instance chain terminator function + * for GetPhysicalDeviceXlibPresentationSupportKHR + */ +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display *dpy, + VisualID visualID) { + struct loader_physical_device *phys_dev = + (struct loader_physical_device *)physicalDevice; + struct loader_icd *icd = phys_dev->this_icd; + + assert( + icd->GetPhysicalDeviceXlibPresentationSupportKHR && + "loader: null GetPhysicalDeviceXlibPresentationSupportKHR ICD pointer"); + + return icd->GetPhysicalDeviceXlibPresentationSupportKHR( + phys_dev->phys_dev, queueFamilyIndex, dpy, visualID); +} +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + +/* + * 
Functions for the VK_KHR_android_surface extension: + */ + +/* + * This is the trampoline entrypoint + * for CreateAndroidSurfaceKHR + */ +LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL +vkCreateAndroidSurfaceKHR(VkInstance instance, ANativeWindow *window, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + const VkLayerInstanceDispatchTable *disp; + disp = loader_get_instance_dispatch(instance); + VkResult res; + + res = disp->CreateAndroidSurfaceKHR(instance, window, pAllocator, pSurface); + return res; +} + +/* + * This is the instance chain terminator function + * for CreateAndroidSurfaceKHR + */ +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateAndroidSurfaceKHR(VkInstance instance, ANativeWindow *window, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface) { + struct loader_instance *ptr_instance = loader_get_instance(instance); + VkIcdSurfaceAndroid *pIcdSurface = NULL; + + pIcdSurface = loader_heap_alloc(ptr_instance, sizeof(VkIcdSurfaceAndroid), + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE); + if (pIcdSurface == NULL) { + return VK_ERROR_OUT_OF_HOST_MEMORY; + } + + pIcdSurface->base.platform = VK_ICD_WSI_PLATFORM_ANDROID; + pIcdSurface->window = window; + + *pSurface = (VkSurfaceKHR)pIcdSurface; + + return VK_SUCCESS; +} + +#endif // VK_USE_PLATFORM_ANDROID_KHR + +bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, + const char *name, void **addr) { + *addr = NULL; + + /* + * Functions for the VK_KHR_surface extension: + */ + if (!strcmp("vkDestroySurfaceKHR", name)) { + *addr = ptr_instance->wsi_surface_enabled ? (void *)vkDestroySurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name)) { + *addr = ptr_instance->wsi_surface_enabled + ? (void *)vkGetPhysicalDeviceSurfaceSupportKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name)) { + *addr = ptr_instance->wsi_surface_enabled + ? 
(void *)vkGetPhysicalDeviceSurfaceCapabilitiesKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", name)) { + *addr = ptr_instance->wsi_surface_enabled + ? (void *)vkGetPhysicalDeviceSurfaceFormatsKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", name)) { + *addr = ptr_instance->wsi_surface_enabled + ? (void *)vkGetPhysicalDeviceSurfacePresentModesKHR + : NULL; + return true; + } + + /* + * Functions for the VK_KHR_swapchain extension: + * + * Note: This is a device extension, and its functions are statically + * exported from the loader. Per Khronos decisions, the the loader's GIPA + * function will return the trampoline function for such device-extension + * functions, regardless of whether the extension has been enabled. + */ + if (!strcmp("vkCreateSwapchainKHR", name)) { + *addr = (void *)vkCreateSwapchainKHR; + return true; + } + if (!strcmp("vkDestroySwapchainKHR", name)) { + *addr = (void *)vkDestroySwapchainKHR; + return true; + } + if (!strcmp("vkGetSwapchainImagesKHR", name)) { + *addr = (void *)vkGetSwapchainImagesKHR; + return true; + } + if (!strcmp("vkAcquireNextImageKHR", name)) { + *addr = (void *)vkAcquireNextImageKHR; + return true; + } + if (!strcmp("vkQueuePresentKHR", name)) { + *addr = (void *)vkQueuePresentKHR; + return true; + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + /* + * Functions for the VK_KHR_win32_surface extension: + */ + if (!strcmp("vkCreateWin32SurfaceKHR", name)) { + *addr = ptr_instance->wsi_win32_surface_enabled + ? (void *)vkCreateWin32SurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name)) { + *addr = ptr_instance->wsi_win32_surface_enabled + ? 
(void *)vkGetPhysicalDeviceWin32PresentationSupportKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_WIN32_KHR +#ifdef VK_USE_PLATFORM_MIR_KHR + /* + * Functions for the VK_KHR_mir_surface extension: + */ + if (!strcmp("vkCreateMirSurfaceKHR", name)) { + *addr = ptr_instance->wsi_mir_surface_enabled + ? (void *)vkCreateMirSurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name)) { + *addr = ptr_instance->wsi_mir_surface_enabled + ? (void *)vkGetPhysicalDeviceMirPresentationSupportKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_MIR_KHR +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + /* + * Functions for the VK_KHR_wayland_surface extension: + */ + if (!strcmp("vkCreateWaylandSurfaceKHR", name)) { + *addr = ptr_instance->wsi_wayland_surface_enabled + ? (void *)vkCreateWaylandSurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name)) { + *addr = + ptr_instance->wsi_wayland_surface_enabled + ? (void *)vkGetPhysicalDeviceWaylandPresentationSupportKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_WAYLAND_KHR +#ifdef VK_USE_PLATFORM_XCB_KHR + /* + * Functions for the VK_KHR_xcb_surface extension: + */ + if (!strcmp("vkCreateXcbSurfaceKHR", name)) { + *addr = ptr_instance->wsi_xcb_surface_enabled + ? (void *)vkCreateXcbSurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name)) { + *addr = + ptr_instance->wsi_xcb_surface_enabled + ? (void *)vkGetPhysicalDeviceXcbPresentationSupportKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_XCB_KHR +#ifdef VK_USE_PLATFORM_XLIB_KHR + /* + * Functions for the VK_KHR_xlib_surface extension: + */ + if (!strcmp("vkCreateXlibSurfaceKHR", name)) { + *addr = ptr_instance->wsi_xlib_surface_enabled + ? 
(void *)vkCreateXlibSurfaceKHR + : NULL; + return true; + } + if (!strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", + name)) { + *addr = + ptr_instance->wsi_xlib_surface_enabled + ? (void *)vkGetPhysicalDeviceXlibPresentationSupportKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_XLIB_KHR +#ifdef VK_USE_PLATFORM_ANDROID_KHR + /* + * Functions for the VK_KHR_android_surface extension: + */ + if (!strcmp("vkCreateAndroidSurfaceKHR", name)) { + *addr = ptr_instance->wsi_xlib_surface_enabled + ? (void *)vkCreateAndroidSurfaceKHR + : NULL; + return true; + } +#endif // VK_USE_PLATFORM_ANDROID_KHR + + return false; + } diff --git a/third_party/vulkan/loader/wsi.h b/third_party/vulkan/loader/wsi.h new file mode 100644 index 000000000..c0213313d --- /dev/null +++ b/third_party/vulkan/loader/wsi.h @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + * + * Author: Ian Elliott + * + */ + +#include "vk_loader_platform.h" +#include "loader.h" + +bool wsi_swapchain_instance_gpa(struct loader_instance *ptr_instance, + const char *name, void **addr); +void wsi_add_instance_extensions(const struct loader_instance *inst, + struct loader_extension_list *ext_list); + +void wsi_create_instance(struct loader_instance *ptr_instance, + const VkInstanceCreateInfo *pCreateInfo); + +VKAPI_ATTR void VKAPI_CALL +loader_DestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, + const VkAllocationCallbacks *pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_GetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 *pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL loader_GetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR *pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_GetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t *pSurfaceFormatCount, + VkSurfaceFormatKHR *pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL +loader_GetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t *pPresentModeCount, + VkPresentModeKHR *pPresentModes); + +#ifdef VK_USE_PLATFORM_WIN32_KHR +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateWin32SurfaceKHR(VkInstance instance, + const VkWin32SurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface); +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, 
uint32_t queueFamilyIndex); +#endif +#ifdef VK_USE_PLATFORM_MIR_KHR +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateMirSurfaceKHR(VkInstance instance, + const VkMirSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface); +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceMirPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + MirConnection *connection); +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateWaylandSurfaceKHR(VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface); +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + struct wl_display *display); +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateXcbSurfaceKHR(VkInstance instance, + const VkXcbSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, + xcb_connection_t *connection, xcb_visualid_t visual_id); +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR +VKAPI_ATTR VkResult VKAPI_CALL +loader_CreateXlibSurfaceKHR(VkInstance instance, + const VkXlibSurfaceCreateInfoKHR *pCreateInfo, + const VkAllocationCallbacks *pAllocator, + VkSurfaceKHR *pSurface); +VKAPI_ATTR VkBool32 VKAPI_CALL +loader_GetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display *dpy, + VisualID visualID); +#endif diff --git a/third_party/vulkan/vk_debug_marker_layer.h b/third_party/vulkan/vk_debug_marker_layer.h new file mode 100644 index 000000000..e882b02b4 --- /dev/null +++ b/third_party/vulkan/vk_debug_marker_layer.h @@ -0,0 +1,44 @@ +// +// File: 
vk_debug_marker_layer.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + * + * Authors: + * Jon Ashburn + * Courtney Goeltzenleuchter + */ + +#pragma once + +#include "vulkan.h" +#include "vk_lunarg_debug_marker.h" +#include "vk_layer.h" + +typedef struct VkLayerDebugMarkerDispatchTable_ { + PFN_vkCmdDbgMarkerBegin CmdDbgMarkerBegin; + PFN_vkCmdDbgMarkerEnd CmdDbgMarkerEnd; + PFN_vkDbgSetObjectTag DbgSetObjectTag; + PFN_vkDbgSetObjectName DbgSetObjectName; +} VkLayerDebugMarkerDispatchTable; diff --git a/third_party/vulkan/vk_icd.h b/third_party/vulkan/vk_icd.h new file mode 100644 index 000000000..60b29e037 --- /dev/null +++ b/third_party/vulkan/vk_icd.h @@ -0,0 +1,114 @@ +// +// File: vk_icd.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. 
+ * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + * + */ + +#ifndef VKICD_H +#define VKICD_H + +#include "vk_platform.h" + +/* + * The ICD must reserve space for a pointer for the loader's dispatch + * table, at the start of . + * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro. 
+ */ + +#define ICD_LOADER_MAGIC 0x01CDC0DE + +typedef union _VK_LOADER_DATA { + uintptr_t loaderMagic; + void *loaderData; +} VK_LOADER_DATA; + +static inline void set_loader_magic_value(void *pNewObject) { + VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + loader_info->loaderMagic = ICD_LOADER_MAGIC; +} + +static inline bool valid_loader_magic_value(void *pNewObject) { + const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC; +} + +/* + * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that + * contains the platform-specific connection and surface information. + */ +typedef enum _VkIcdWsiPlatform { + VK_ICD_WSI_PLATFORM_MIR, + VK_ICD_WSI_PLATFORM_WAYLAND, + VK_ICD_WSI_PLATFORM_WIN32, + VK_ICD_WSI_PLATFORM_XCB, + VK_ICD_WSI_PLATFORM_XLIB, +} VkIcdWsiPlatform; + +typedef struct _VkIcdSurfaceBase { + VkIcdWsiPlatform platform; +} VkIcdSurfaceBase; + +#ifdef VK_USE_PLATFORM_MIR_KHR +typedef struct _VkIcdSurfaceMir { + VkIcdSurfaceBase base; + MirConnection *connection; + MirSurface *mirSurface; +} VkIcdSurfaceMir; +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +typedef struct _VkIcdSurfaceWayland { + VkIcdSurfaceBase base; + struct wl_display *display; + struct wl_surface *surface; +} VkIcdSurfaceWayland; +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_WIN32_KHR +typedef struct _VkIcdSurfaceWin32 { + VkIcdSurfaceBase base; + HINSTANCE hinstance; + HWND hwnd; +} VkIcdSurfaceWin32; +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR +typedef struct _VkIcdSurfaceXcb { + VkIcdSurfaceBase base; + xcb_connection_t *connection; + xcb_window_t window; +} VkIcdSurfaceXcb; +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR +typedef struct _VkIcdSurfaceXlib { + VkIcdSurfaceBase base; + Display *dpy; + Window window; +} VkIcdSurfaceXlib; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#endif 
// VKICD_H diff --git a/third_party/vulkan/vk_layer.h b/third_party/vulkan/vk_layer.h new file mode 100644 index 000000000..248704340 --- /dev/null +++ b/third_party/vulkan/vk_layer.h @@ -0,0 +1,313 @@ +// +// File: vk_layer.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + */ + +/* Need to define dispatch table + * Core struct can then have ptr to dispatch table at the top + * Along with object ptrs for current and next OBJ + */ +#pragma once + +#include "vulkan.h" +#include "vk_lunarg_debug_marker.h" +#if defined(__GNUC__) && __GNUC__ >= 4 +#define VK_LAYER_EXPORT __attribute__((visibility("default"))) +#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590) +#define VK_LAYER_EXPORT __attribute__((visibility("default"))) +#else +#define VK_LAYER_EXPORT +#endif + +typedef struct VkLayerDispatchTable_ { + PFN_vkGetDeviceProcAddr GetDeviceProcAddr; + PFN_vkDestroyDevice DestroyDevice; + PFN_vkGetDeviceQueue GetDeviceQueue; + PFN_vkQueueSubmit QueueSubmit; + PFN_vkQueueWaitIdle QueueWaitIdle; + PFN_vkDeviceWaitIdle DeviceWaitIdle; + PFN_vkAllocateMemory AllocateMemory; + PFN_vkFreeMemory FreeMemory; + PFN_vkMapMemory MapMemory; + PFN_vkUnmapMemory UnmapMemory; + PFN_vkFlushMappedMemoryRanges FlushMappedMemoryRanges; + PFN_vkInvalidateMappedMemoryRanges InvalidateMappedMemoryRanges; + PFN_vkGetDeviceMemoryCommitment GetDeviceMemoryCommitment; + PFN_vkGetImageSparseMemoryRequirements GetImageSparseMemoryRequirements; + PFN_vkGetImageMemoryRequirements GetImageMemoryRequirements; + PFN_vkGetBufferMemoryRequirements GetBufferMemoryRequirements; + PFN_vkBindImageMemory BindImageMemory; + PFN_vkBindBufferMemory BindBufferMemory; + PFN_vkQueueBindSparse QueueBindSparse; + PFN_vkCreateFence CreateFence; + PFN_vkDestroyFence DestroyFence; + PFN_vkGetFenceStatus GetFenceStatus; + PFN_vkResetFences ResetFences; + PFN_vkWaitForFences WaitForFences; + PFN_vkCreateSemaphore CreateSemaphore; + PFN_vkDestroySemaphore DestroySemaphore; + PFN_vkCreateEvent CreateEvent; + PFN_vkDestroyEvent DestroyEvent; + PFN_vkGetEventStatus GetEventStatus; + PFN_vkSetEvent SetEvent; + PFN_vkResetEvent ResetEvent; + PFN_vkCreateQueryPool CreateQueryPool; + PFN_vkDestroyQueryPool DestroyQueryPool; + PFN_vkGetQueryPoolResults GetQueryPoolResults; + PFN_vkCreateBuffer 
CreateBuffer; + PFN_vkDestroyBuffer DestroyBuffer; + PFN_vkCreateBufferView CreateBufferView; + PFN_vkDestroyBufferView DestroyBufferView; + PFN_vkCreateImage CreateImage; + PFN_vkDestroyImage DestroyImage; + PFN_vkGetImageSubresourceLayout GetImageSubresourceLayout; + PFN_vkCreateImageView CreateImageView; + PFN_vkDestroyImageView DestroyImageView; + PFN_vkCreateShaderModule CreateShaderModule; + PFN_vkDestroyShaderModule DestroyShaderModule; + PFN_vkCreatePipelineCache CreatePipelineCache; + PFN_vkDestroyPipelineCache DestroyPipelineCache; + PFN_vkGetPipelineCacheData GetPipelineCacheData; + PFN_vkMergePipelineCaches MergePipelineCaches; + PFN_vkCreateGraphicsPipelines CreateGraphicsPipelines; + PFN_vkCreateComputePipelines CreateComputePipelines; + PFN_vkDestroyPipeline DestroyPipeline; + PFN_vkCreatePipelineLayout CreatePipelineLayout; + PFN_vkDestroyPipelineLayout DestroyPipelineLayout; + PFN_vkCreateSampler CreateSampler; + PFN_vkDestroySampler DestroySampler; + PFN_vkCreateDescriptorSetLayout CreateDescriptorSetLayout; + PFN_vkDestroyDescriptorSetLayout DestroyDescriptorSetLayout; + PFN_vkCreateDescriptorPool CreateDescriptorPool; + PFN_vkDestroyDescriptorPool DestroyDescriptorPool; + PFN_vkResetDescriptorPool ResetDescriptorPool; + PFN_vkAllocateDescriptorSets AllocateDescriptorSets; + PFN_vkFreeDescriptorSets FreeDescriptorSets; + PFN_vkUpdateDescriptorSets UpdateDescriptorSets; + PFN_vkCreateFramebuffer CreateFramebuffer; + PFN_vkDestroyFramebuffer DestroyFramebuffer; + PFN_vkCreateRenderPass CreateRenderPass; + PFN_vkDestroyRenderPass DestroyRenderPass; + PFN_vkGetRenderAreaGranularity GetRenderAreaGranularity; + PFN_vkCreateCommandPool CreateCommandPool; + PFN_vkDestroyCommandPool DestroyCommandPool; + PFN_vkResetCommandPool ResetCommandPool; + PFN_vkAllocateCommandBuffers AllocateCommandBuffers; + PFN_vkFreeCommandBuffers FreeCommandBuffers; + PFN_vkBeginCommandBuffer BeginCommandBuffer; + PFN_vkEndCommandBuffer EndCommandBuffer; + 
PFN_vkResetCommandBuffer ResetCommandBuffer; + PFN_vkCmdBindPipeline CmdBindPipeline; + PFN_vkCmdBindDescriptorSets CmdBindDescriptorSets; + PFN_vkCmdBindVertexBuffers CmdBindVertexBuffers; + PFN_vkCmdBindIndexBuffer CmdBindIndexBuffer; + PFN_vkCmdSetViewport CmdSetViewport; + PFN_vkCmdSetScissor CmdSetScissor; + PFN_vkCmdSetLineWidth CmdSetLineWidth; + PFN_vkCmdSetDepthBias CmdSetDepthBias; + PFN_vkCmdSetBlendConstants CmdSetBlendConstants; + PFN_vkCmdSetDepthBounds CmdSetDepthBounds; + PFN_vkCmdSetStencilCompareMask CmdSetStencilCompareMask; + PFN_vkCmdSetStencilWriteMask CmdSetStencilWriteMask; + PFN_vkCmdSetStencilReference CmdSetStencilReference; + PFN_vkCmdDraw CmdDraw; + PFN_vkCmdDrawIndexed CmdDrawIndexed; + PFN_vkCmdDrawIndirect CmdDrawIndirect; + PFN_vkCmdDrawIndexedIndirect CmdDrawIndexedIndirect; + PFN_vkCmdDispatch CmdDispatch; + PFN_vkCmdDispatchIndirect CmdDispatchIndirect; + PFN_vkCmdCopyBuffer CmdCopyBuffer; + PFN_vkCmdCopyImage CmdCopyImage; + PFN_vkCmdBlitImage CmdBlitImage; + PFN_vkCmdCopyBufferToImage CmdCopyBufferToImage; + PFN_vkCmdCopyImageToBuffer CmdCopyImageToBuffer; + PFN_vkCmdUpdateBuffer CmdUpdateBuffer; + PFN_vkCmdFillBuffer CmdFillBuffer; + PFN_vkCmdClearColorImage CmdClearColorImage; + PFN_vkCmdClearDepthStencilImage CmdClearDepthStencilImage; + PFN_vkCmdClearAttachments CmdClearAttachments; + PFN_vkCmdResolveImage CmdResolveImage; + PFN_vkCmdSetEvent CmdSetEvent; + PFN_vkCmdResetEvent CmdResetEvent; + PFN_vkCmdWaitEvents CmdWaitEvents; + PFN_vkCmdPipelineBarrier CmdPipelineBarrier; + PFN_vkCmdBeginQuery CmdBeginQuery; + PFN_vkCmdEndQuery CmdEndQuery; + PFN_vkCmdResetQueryPool CmdResetQueryPool; + PFN_vkCmdWriteTimestamp CmdWriteTimestamp; + PFN_vkCmdCopyQueryPoolResults CmdCopyQueryPoolResults; + PFN_vkCmdPushConstants CmdPushConstants; + PFN_vkCmdBeginRenderPass CmdBeginRenderPass; + PFN_vkCmdNextSubpass CmdNextSubpass; + PFN_vkCmdEndRenderPass CmdEndRenderPass; + PFN_vkCmdExecuteCommands CmdExecuteCommands; + 
PFN_vkCreateSwapchainKHR CreateSwapchainKHR; + PFN_vkDestroySwapchainKHR DestroySwapchainKHR; + PFN_vkGetSwapchainImagesKHR GetSwapchainImagesKHR; + PFN_vkAcquireNextImageKHR AcquireNextImageKHR; + PFN_vkQueuePresentKHR QueuePresentKHR; +} VkLayerDispatchTable; + +typedef struct VkLayerInstanceDispatchTable_ { + PFN_vkGetInstanceProcAddr GetInstanceProcAddr; + PFN_vkDestroyInstance DestroyInstance; + PFN_vkEnumeratePhysicalDevices EnumeratePhysicalDevices; + PFN_vkGetPhysicalDeviceFeatures GetPhysicalDeviceFeatures; + PFN_vkGetPhysicalDeviceImageFormatProperties + GetPhysicalDeviceImageFormatProperties; + PFN_vkGetPhysicalDeviceFormatProperties GetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties + GetPhysicalDeviceSparseImageFormatProperties; + PFN_vkGetPhysicalDeviceProperties GetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceQueueFamilyProperties + GetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceMemoryProperties GetPhysicalDeviceMemoryProperties; + PFN_vkEnumerateDeviceExtensionProperties EnumerateDeviceExtensionProperties; + PFN_vkEnumerateDeviceLayerProperties EnumerateDeviceLayerProperties; + PFN_vkDestroySurfaceKHR DestroySurfaceKHR; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR GetPhysicalDeviceSurfaceSupportKHR; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR + GetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR GetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR + GetPhysicalDeviceSurfacePresentModesKHR; + PFN_vkCreateDebugReportCallbackEXT CreateDebugReportCallbackEXT; + PFN_vkDestroyDebugReportCallbackEXT DestroyDebugReportCallbackEXT; + PFN_vkDebugReportMessageEXT DebugReportMessageEXT; +#ifdef VK_USE_PLATFORM_MIR_KHR + PFN_vkCreateMirSurfaceKHR CreateMirSurfaceKHR; + PFN_vkGetPhysicalDeviceMirPresentationSupportKHR + GetPhysicalDeviceMirPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + PFN_vkCreateWaylandSurfaceKHR 
CreateWaylandSurfaceKHR; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR + GetPhysicalDeviceWaylandPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkCreateWin32SurfaceKHR CreateWin32SurfaceKHR; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR + GetPhysicalDeviceWin32PresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XCB_KHR + PFN_vkCreateXcbSurfaceKHR CreateXcbSurfaceKHR; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR + GetPhysicalDeviceXcbPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_XLIB_KHR + PFN_vkCreateXlibSurfaceKHR CreateXlibSurfaceKHR; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR + GetPhysicalDeviceXlibPresentationSupportKHR; +#endif +#ifdef VK_USE_PLATFORM_ANDROID_KHR + PFN_vkCreateAndroidSurfaceKHR CreateAndroidSurfaceKHR; +#endif +} VkLayerInstanceDispatchTable; + +// LL node for tree of dbg callback functions +typedef struct VkLayerDbgFunctionNode_ { + VkDebugReportCallbackEXT msgCallback; + PFN_vkDebugReportCallbackEXT pfnMsgCallback; + VkFlags msgFlags; + void *pUserData; + struct VkLayerDbgFunctionNode_ *pNext; +} VkLayerDbgFunctionNode; + +typedef enum VkLayerDbgAction_ { + VK_DBG_LAYER_ACTION_IGNORE = 0x0, + VK_DBG_LAYER_ACTION_CALLBACK = 0x1, + VK_DBG_LAYER_ACTION_LOG_MSG = 0x2, + VK_DBG_LAYER_ACTION_BREAK = 0x4, + VK_DBG_LAYER_ACTION_DEBUG_OUTPUT = 0x8, +} VkLayerDbgAction; + +// ------------------------------------------------------------------------------------------------ +// CreateInstance and CreateDevice support structures + +typedef enum VkLayerFunction_ { + VK_LAYER_LINK_INFO = 0, + VK_LAYER_DEVICE_INFO = 1, + VK_LAYER_INSTANCE_INFO = 2 +} VkLayerFunction; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerInstanceInfo avoids issues with finding the + * exact instance being used. 
+ */ +typedef struct VkLayerInstanceInfo_ { + void *instance_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerInstanceInfo; + +typedef struct VkLayerInstanceLink_ { + struct VkLayerInstanceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerInstanceLink; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerDeviceInfo avoids issues with finding the + * exact instance being used. + */ +typedef struct VkLayerDeviceInfo_ { + void *device_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerDeviceInfo; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LAYER_INSTANCE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerInstanceLink *pLayerInfo; + VkLayerInstanceInfo instanceInfo; + } u; +} VkLayerInstanceCreateInfo; + +typedef struct VkLayerDeviceLink_ { + struct VkLayerDeviceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr; +} VkLayerDeviceLink; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LAYER_DEVICE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerDeviceLink *pLayerInfo; + VkLayerDeviceInfo deviceInfo; + } u; +} VkLayerDeviceCreateInfo; + +// ------------------------------------------------------------------------------------------------ +// API functions diff --git a/third_party/vulkan/vk_lunarg_debug_marker.h b/third_party/vulkan/vk_lunarg_debug_marker.h new file mode 100644 index 000000000..edff2b9ee --- /dev/null +++ b/third_party/vulkan/vk_lunarg_debug_marker.h @@ -0,0 +1,98 @@ +// +// File: vk_lunarg_debug_marker.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. 
+ * + * Authors: + * Jon Ashburn + * Courtney Goeltzenleuchter + */ + +#ifndef __VK_DEBUG_MARKER_H__ +#define __VK_DEBUG_MARKER_H__ + +#include "vulkan.h" + +#define VK_DEBUG_MARKER_EXTENSION_NUMBER 6 +#define VK_DEBUG_MARKER_EXTENSION_REVISION 1 +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +/* +*************************************************************************************************** +* DebugMarker Vulkan Extension API +*************************************************************************************************** +*/ + +#define DEBUG_MARKER_EXTENSION_NAME "VK_LUNARG_DEBUG_MARKER" + +// ------------------------------------------------------------------------------------------------ +// Enumerations + +#define VK_DEBUG_MARKER_ENUM_EXTEND(type, id) \ + ((type)(VK_DEBUG_MARKER_EXTENSION_NUMBER * -1000 + (id))) + +#define VK_OBJECT_INFO_TYPE_DBG_OBJECT_TAG \ + VK_DEBUG_MARKER_ENUM_EXTEND(VkDbgObjectInfoType, 0) +#define VK_OBJECT_INFO_TYPE_DBG_OBJECT_NAME \ + VK_DEBUG_MARKER_ENUM_EXTEND(VkDbgObjectInfoType, 1) + +// ------------------------------------------------------------------------------------------------ +// API functions + +typedef void(VKAPI_PTR *PFN_vkCmdDbgMarkerBegin)(VkCommandBuffer commandBuffer, + const char *pMarker); +typedef void(VKAPI_PTR *PFN_vkCmdDbgMarkerEnd)(VkCommandBuffer commandBuffer); +typedef VkResult(VKAPI_PTR *PFN_vkDbgSetObjectTag)( + VkDevice device, VkDebugReportObjectTypeEXT objType, uint64_t object, + size_t tagSize, const void *pTag); +typedef VkResult(VKAPI_PTR *PFN_vkDbgSetObjectName)( + VkDevice device, VkDebugReportObjectTypeEXT objType, uint64_t object, + size_t nameSize, const char *pName); + +#ifndef VK_NO_PROTOTYPES + +// DebugMarker extension entrypoints +VKAPI_ATTR void VKAPI_CALL +vkCmdDbgMarkerBegin(VkCommandBuffer commandBuffer, const char *pMarker); + +VKAPI_ATTR void VKAPI_CALL vkCmdDbgMarkerEnd(VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL +vkDbgSetObjectTag(VkDevice 
device, VkDebugReportObjectTypeEXT objType, + uint64_t object, size_t tagSize, const void *pTag); + +VKAPI_ATTR VkResult VKAPI_CALL +vkDbgSetObjectName(VkDevice device, VkDebugReportObjectTypeEXT objType, + uint64_t object, size_t nameSize, const char *pName); + +#endif // VK_NO_PROTOTYPES + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif // __VK_DEBUG_MARKER_H__ diff --git a/third_party/vulkan/vk_platform.h b/third_party/vulkan/vk_platform.h new file mode 100644 index 000000000..a53e725a9 --- /dev/null +++ b/third_party/vulkan/vk_platform.h @@ -0,0 +1,127 @@ +// +// File: vk_platform.h +// +/* +** Copyright (c) 2014-2015 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. 
+*/ + + +#ifndef __VK_PLATFORM_H__ +#define __VK_PLATFORM_H__ + +#ifdef __cplusplus +extern "C" +{ +#endif // __cplusplus + +/* +*************************************************************************************************** +* Platform-specific directives and type declarations +*************************************************************************************************** +*/ + +/* Platform-specific calling convention macros. + * + * Platforms should define these so that Vulkan clients call Vulkan commands + * with the same calling conventions that the Vulkan implementation expects. + * + * VKAPI_ATTR - Placed before the return type in function declarations. + * Useful for C++11 and GCC/Clang-style function attribute syntax. + * VKAPI_CALL - Placed after the return type in function declarations. + * Useful for MSVC-style calling convention syntax. + * VKAPI_PTR - Placed between the '(' and '*' in function pointer types. + * + * Function declaration: VKAPI_ATTR void VKAPI_CALL vkCommand(void); + * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void); + */ +#if defined(_WIN32) + // On Windows, Vulkan commands use the stdcall convention + #define VKAPI_ATTR + #define VKAPI_CALL __stdcall + #define VKAPI_PTR VKAPI_CALL +#elif defined(__ANDROID__) && defined(__ARM_EABI__) && !defined(__ARM_ARCH_7A__) + // Android does not support Vulkan in native code using the "armeabi" ABI. + #error "Vulkan requires the 'armeabi-v7a' or 'armeabi-v7a-hard' ABI on 32-bit ARM CPUs" +#elif defined(__ANDROID__) && defined(__ARM_ARCH_7A__) + // On Android/ARMv7a, Vulkan functions use the armeabi-v7a-hard calling + // convention, even if the application's native code is compiled with the + // armeabi-v7a calling convention. 
+ #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp"))) + #define VKAPI_CALL + #define VKAPI_PTR VKAPI_ATTR +#else + // On other platforms, use the default calling convention + #define VKAPI_ATTR + #define VKAPI_CALL + #define VKAPI_PTR +#endif + +#include + +#if !defined(VK_NO_STDINT_H) + #if defined(_MSC_VER) && (_MSC_VER < 1600) + typedef signed __int8 int8_t; + typedef unsigned __int8 uint8_t; + typedef signed __int16 int16_t; + typedef unsigned __int16 uint16_t; + typedef signed __int32 int32_t; + typedef unsigned __int32 uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include + #endif +#endif // !defined(VK_NO_STDINT_H) + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +// Platform-specific headers required by platform window system extensions. +// These are enabled prior to #including "vulkan.h". The same enable then +// controls inclusion of the extension interfaces in vulkan.h. + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +#include +#endif + +#ifdef VK_USE_PLATFORM_MIR_KHR +#include +#endif + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#include +#endif + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#include +#endif + +#ifdef VK_USE_PLATFORM_XLIB_KHR +#include +#endif + +#ifdef VK_USE_PLATFORM_XCB_KHR +#include +#endif + +#endif // __VK_PLATFORM_H__ diff --git a/third_party/vulkan/vk_sdk_platform.h b/third_party/vulkan/vk_sdk_platform.h new file mode 100644 index 000000000..f79396bac --- /dev/null +++ b/third_party/vulkan/vk_sdk_platform.h @@ -0,0 +1,53 @@ +// +// File: vk_sdk_platform.h +// +/* + * Copyright (c) 2015-2016 The Khronos Group Inc. + * Copyright (c) 2015-2016 Valve Corporation + * Copyright (c) 2015-2016 LunarG, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and/or associated documentation files (the "Materials"), to + * deal in the Materials without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Materials, and to permit persons to whom the Materials are + * furnished to do so, subject to the following conditions: + * + * The above copyright notice(s) and this permission notice shall be included in + * all copies or substantial portions of the Materials. + * + * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE + * USE OR OTHER DEALINGS IN THE MATERIALS. + */ + +#ifndef VK_SDK_PLATFORM_H +#define VK_SDK_PLATFORM_H + +#if defined(_WIN32) +#define NOMINMAX +#ifndef __cplusplus +#undef inline +#define inline __inline +#endif // __cplusplus + +#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) +// C99: +// Microsoft didn't implement C99 in Visual Studio; but started adding it with +// VS2013. However, VS2013 still didn't have snprintf(). The following is a +// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the +// "CMakeLists.txt" file). +// NOTE: This is fixed in Visual Studio 2015. 
+#define snprintf _snprintf +#endif + +#define strdup _strdup + +#endif // _WIN32 + +#endif // VK_SDK_PLATFORM_H diff --git a/third_party/vulkan/vulkan.h b/third_party/vulkan/vulkan.h new file mode 100644 index 000000000..cd6a71ac1 --- /dev/null +++ b/third_party/vulkan/vulkan.h @@ -0,0 +1,3775 @@ +#ifndef __vulkan_h_ +#define __vulkan_h_ 1 + +#ifdef __cplusplus +extern "C" { +#endif + +/* +** Copyright (c) 2015-2016 The Khronos Group Inc. +** +** Permission is hereby granted, free of charge, to any person obtaining a +** copy of this software and/or associated documentation files (the +** "Materials"), to deal in the Materials without restriction, including +** without limitation the rights to use, copy, modify, merge, publish, +** distribute, sublicense, and/or sell copies of the Materials, and to +** permit persons to whom the Materials are furnished to do so, subject to +** the following conditions: +** +** The above copyright notice and this permission notice shall be included +** in all copies or substantial portions of the Materials. +** +** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. 
+** +*/ + + +#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_MAKE_VERSION(major, minor, patch) \ + (((major) << 22) | ((minor) << 12) | (patch)) + +// Vulkan API version supported by this file +#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 3) + +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff) +#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff) + +#define VK_NULL_HANDLE 0 + + + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#if defined(__LP64__) || defined(_WIN64) || defined(__x86_64__) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; +#else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; +#endif + + + +typedef uint32_t VkFlags; +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkSampleMask; + +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) 
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) + +#define VK_LOD_CLAMP_NONE 1000.0f +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_WHOLE_SIZE (~0ULL) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_TRUE 1 +#define VK_FALSE 0 +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 +#define VK_UUID_SIZE 16 +#define VK_MAX_MEMORY_TYPES 32 +#define VK_MAX_MEMORY_HEAPS 16 +#define VK_MAX_EXTENSION_NAME_SIZE 256 +#define VK_MAX_DESCRIPTION_SIZE 256 + + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, + VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT 
= -1000011001, + VK_RESULT_BEGIN_RANGE = VK_ERROR_FORMAT_NOT_SUPPORTED, + VK_RESULT_END_RANGE = VK_INCOMPLETE, + VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FORMAT_NOT_SUPPORTED + 1), + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + 
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, + VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkSystemAllocationScope { + 
VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, + VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, + VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + 
VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 
86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + 
VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, + VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + 
VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, + VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, + VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, + VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, + VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, + VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, + VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, + VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, + VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, + VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, + VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + 
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, + VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, + VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, + VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, + VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, + VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, + VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, + 
VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, + VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, + VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, + VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, + VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, + VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, + VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + 
VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, + VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, + VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, + VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, + VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, + VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, + VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 
11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, + VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, + VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, + VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, + VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, + VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, + VK_FILTER_END_RANGE = VK_FILTER_LINEAR, + VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + 
VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, + VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, + VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, + VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, + VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC 
= 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, + VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), + VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, + VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, + VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, + VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, + VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, + VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, + VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, + VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), + VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, + VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = 
(VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, + VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, + VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, + VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + 
VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; +typedef VkFlags VkDeviceCreateFlags; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + 
VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; +typedef VkFlags VkEventCreateFlags; +typedef VkFlags VkQueryPoolCreateFlags; + 
+typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + 
VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, +} VkBufferUsageFlagBits; +typedef VkFlags VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x1F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, +} VkShaderStageFlagBits; +typedef VkFlags VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x3, +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, +} VkColorComponentFlagBits; +typedef VkFlags 
VkColorComponentFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; +typedef VkFlags VkSamplerCreateFlags; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; +typedef VkFlags VkFramebufferCreateFlags; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 
0x00000002, +} VkCommandPoolCreateFlagBits; +typedef VkFlags VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FRONT_AND_BACK = 0x3, +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); + +typedef struct 
VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 
shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t 
maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t 
maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct 
VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char 
description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t 
arrayLayer; +} VkImageSubresource; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct 
VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + 
uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef 
struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef 
struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + 
VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const 
VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} 
VkAttachmentReference; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + 
VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + 
VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* 
pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, 
uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* 
pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR 
*PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* 
pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR 
*PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const 
VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t 
firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR 
*PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const uint32_t* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer 
commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer 
commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( + VkPhysicalDevice 
physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + 
VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL 
vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const 
VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL 
vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + 
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t 
descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer 
commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const 
uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t x, + uint32_t y, + uint32_t z); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* 
pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const uint32_t* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + 
VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* 
pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + +#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) + +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + + +typedef enum VkColorSpaceKHR { + VK_COLORSPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLORSPACE_BEGIN_RANGE = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + VK_COLORSPACE_END_RANGE = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + VK_COLORSPACE_RANGE_SIZE = (VK_COLORSPACE_SRGB_NONLINEAR_KHR - VK_COLORSPACE_SRGB_NONLINEAR_KHR + 1), + VK_COLORSPACE_MAX_ENUM = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_BEGIN_RANGE = VK_PRESENT_MODE_IMMEDIATE_KHR, + VK_PRESENT_MODE_END_RANGE = VK_PRESENT_MODE_FIFO_RELAXED_KHR, + VK_PRESENT_MODE_RANGE_SIZE = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), + VK_PRESENT_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPresentModeKHR; + + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + 
VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, +} VkSurfaceTransformFlagBitsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; + +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, 
uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + +#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) + +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 67 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + 
const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); +#endif + +#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) + +#define VK_KHR_DISPLAY_SPEC_VERSION 21 +#define 
VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" + + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayModeCreateFlagsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t 
planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR*pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + 
VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" + +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + +#ifdef VK_USE_PLATFORM_XLIB_KHR +#define VK_KHR_xlib_surface 1 +#include + +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" + +typedef 
VkFlags VkXlibSurfaceCreateFlagsKHR; + +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif +#endif /* VK_USE_PLATFORM_XLIB_KHR */ + +#ifdef VK_USE_PLATFORM_XCB_KHR +#define VK_KHR_xcb_surface 1 +#include + +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" + +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; + +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + 
const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif +#endif /* VK_USE_PLATFORM_XCB_KHR */ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#define VK_KHR_wayland_surface 1 +#include + +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 5 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" + +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; + +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif +#endif /* VK_USE_PLATFORM_WAYLAND_KHR */ + +#ifdef VK_USE_PLATFORM_MIR_KHR +#define VK_KHR_mir_surface 1 +#include + +#define VK_KHR_MIR_SURFACE_SPEC_VERSION 4 +#define VK_KHR_MIR_SURFACE_EXTENSION_NAME "VK_KHR_mir_surface" + +typedef VkFlags VkMirSurfaceCreateFlagsKHR; + +typedef struct VkMirSurfaceCreateInfoKHR { + VkStructureType sType; + const 
void* pNext; + VkMirSurfaceCreateFlagsKHR flags; + MirConnection* connection; + MirSurface* mirSurface; +} VkMirSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMirSurfaceKHR)(VkInstance instance, const VkMirSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, MirConnection* connection); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR( + VkInstance instance, + const VkMirSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + MirConnection* connection); +#endif +#endif /* VK_USE_PLATFORM_MIR_KHR */ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +#define VK_KHR_android_surface 1 +#include + +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" + +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; + +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif +#endif /* VK_USE_PLATFORM_ANDROID_KHR */ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#define VK_KHR_win32_surface 1 +#include + +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 5 
+#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" + +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; + +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif +#endif /* VK_USE_PLATFORM_WIN32_KHR */ + +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) + +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 1 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + 
VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = 28, +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportErrorEXT { + VK_DEBUG_REPORT_ERROR_NONE_EXT = 0, + VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT = 1, +} VkDebugReportErrorEXT; + + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const 
VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + +#ifdef __cplusplus +} +#endif + +#endif