Merge branch 'master'

This commit is contained in:
DrChat 2018-02-16 16:33:49 -06:00
commit db53559611
30 changed files with 8426 additions and 1948 deletions

View File

@ -42,15 +42,16 @@ git:
submodules: false
before_script:
- export LIBVULKAN_VERSION=1.0.61.1
- export CXX=$CXX_COMPILER
- export CC=$C_COMPILER
# Dump useful info.
- $CXX --version
- python3 --version
# Add Vulkan dependencies
- travis_retry wget http://mirrors.kernel.org/ubuntu/pool/universe/v/vulkan/libvulkan1_1.0.42.0+dfsg1-1ubuntu1~16.04.1_amd64.deb
- travis_retry wget http://mirrors.kernel.org/ubuntu/pool/universe/v/vulkan/libvulkan-dev_1.0.42.0+dfsg1-1ubuntu1~16.04.1_amd64.deb
- if [[ $BUILD == true ]]; then sudo dpkg -i libvulkan1_1.0.42.0+dfsg1-1ubuntu1~16.04.1_amd64.deb libvulkan-dev_1.0.42.0+dfsg1-1ubuntu1~16.04.1_amd64.deb; fi
- travis_retry wget http://mirrors.kernel.org/ubuntu/pool/universe/v/vulkan/libvulkan1_$LIBVULKAN_VERSION+dfsg1-1ubuntu1~16.04.1_amd64.deb
- travis_retry wget http://mirrors.kernel.org/ubuntu/pool/universe/v/vulkan/libvulkan-dev_$LIBVULKAN_VERSION+dfsg1-1ubuntu1~16.04.1_amd64.deb
- if [[ $BUILD == true ]]; then sudo dpkg -i libvulkan1_$LIBVULKAN_VERSION+dfsg1-1ubuntu1~16.04.1_amd64.deb libvulkan-dev_$LIBVULKAN_VERSION+dfsg1-1ubuntu1~16.04.1_amd64.deb; fi
# Prepare environment (pull dependencies, build tools).
- travis_retry ./xenia-build setup

View File

@ -24,19 +24,37 @@ void copy_128_aligned(void* dest, const void* src, size_t count) {
}
#if XE_ARCH_AMD64
void copy_and_swap_16_aligned(void* dest, const void* src, size_t count) {
return copy_and_swap_16_unaligned(dest, src, count);
// Byte-swaps each 16-bit element from src_ptr into dest_ptr.
// Both pointers must be 16-byte aligned; count is the number of elements.
void copy_and_swap_16_aligned(void* dest_ptr, const void* src_ptr,
                              size_t count) {
  auto dst = reinterpret_cast<uint16_t*>(dest_ptr);
  auto src = reinterpret_cast<const uint16_t*>(src_ptr);
  // pshufb control that exchanges the two bytes of every 16-bit lane.
  const __m128i swap_mask =
      _mm_set_epi8(0x0E, 0x0F, 0x0C, 0x0D, 0x0A, 0x0B, 0x08, 0x09, 0x06, 0x07,
                   0x04, 0x05, 0x02, 0x03, 0x00, 0x01);
  size_t idx = 0;
  // Vector loop: eight elements (one 128-bit chunk) per iteration.
  while (idx + 8 <= count) {
    const __m128i chunk =
        _mm_load_si128(reinterpret_cast<const __m128i*>(&src[idx]));
    _mm_store_si128(reinterpret_cast<__m128i*>(&dst[idx]),
                    _mm_shuffle_epi8(chunk, swap_mask));
    idx += 8;
  }
  // Scalar tail for the remaining 0-7 elements.
  while (idx < count) {
    dst[idx] = byte_swap(src[idx]);
    ++idx;
  }
}
void copy_and_swap_16_unaligned(void* dest_ptr, const void* src_ptr,
size_t count) {
auto dest = reinterpret_cast<uint16_t*>(dest_ptr);
auto src = reinterpret_cast<const uint16_t*>(src_ptr);
__m128i shufmask =
_mm_set_epi8(0x0E, 0x0F, 0x0C, 0x0D, 0x0A, 0x0B, 0x08, 0x09, 0x06, 0x07,
0x04, 0x05, 0x02, 0x03, 0x00, 0x01);
size_t i;
for (i = 0; i + 8 <= count; i += 8) {
__m128i input = _mm_loadu_si128(reinterpret_cast<const __m128i*>(&src[i]));
__m128i output =
_mm_or_si128(_mm_slli_epi16(input, 8), _mm_srli_epi16(input, 8));
__m128i output = _mm_shuffle_epi8(input, shufmask);
_mm_storeu_si128(reinterpret_cast<__m128i*>(&dest[i]), output);
}
for (; i < count; ++i) { // handle residual elements
@ -44,30 +62,37 @@ void copy_and_swap_16_unaligned(void* dest_ptr, const void* src_ptr,
}
}
void copy_and_swap_32_aligned(void* dest, const void* src, size_t count) {
return copy_and_swap_32_unaligned(dest, src, count);
// Byte-swaps each 32-bit element from src_ptr into dest_ptr.
// Both pointers must be 16-byte aligned; count is the number of elements.
void copy_and_swap_32_aligned(void* dest_ptr, const void* src_ptr,
                              size_t count) {
  auto dst = reinterpret_cast<uint32_t*>(dest_ptr);
  auto src = reinterpret_cast<const uint32_t*>(src_ptr);
  // pshufb control that reverses the four bytes of every 32-bit lane.
  const __m128i reverse_mask =
      _mm_set_epi8(0x0C, 0x0D, 0x0E, 0x0F, 0x08, 0x09, 0x0A, 0x0B, 0x04, 0x05,
                   0x06, 0x07, 0x00, 0x01, 0x02, 0x03);
  size_t idx = 0;
  // Vector loop: four elements (one 128-bit chunk) per iteration.
  for (; idx + 4 <= count; idx += 4) {
    const __m128i chunk =
        _mm_load_si128(reinterpret_cast<const __m128i*>(&src[idx]));
    _mm_store_si128(reinterpret_cast<__m128i*>(&dst[idx]),
                    _mm_shuffle_epi8(chunk, reverse_mask));
  }
  // Scalar tail for the remaining 0-3 elements.
  for (; idx < count; ++idx) {
    dst[idx] = byte_swap(src[idx]);
  }
}
void copy_and_swap_32_unaligned(void* dest_ptr, const void* src_ptr,
size_t count) {
auto dest = reinterpret_cast<uint32_t*>(dest_ptr);
auto src = reinterpret_cast<const uint32_t*>(src_ptr);
__m128i byte2mask = _mm_set1_epi32(0x00FF0000);
__m128i byte3mask = _mm_set1_epi32(0x0000FF00);
__m128i shufmask =
_mm_set_epi8(0x0C, 0x0D, 0x0E, 0x0F, 0x08, 0x09, 0x0A, 0x0B, 0x04, 0x05,
0x06, 0x07, 0x00, 0x01, 0x02, 0x03);
size_t i;
for (i = 0; i + 4 <= count; i += 4) {
__m128i input = _mm_loadu_si128(reinterpret_cast<const __m128i*>(&src[i]));
// Do the four shifts.
__m128i byte1 = _mm_slli_epi32(input, 24);
__m128i byte2 = _mm_slli_epi32(input, 8);
__m128i byte3 = _mm_srli_epi32(input, 8);
__m128i byte4 = _mm_srli_epi32(input, 24);
// OR bytes together.
__m128i output = _mm_or_si128(byte1, byte4);
byte2 = _mm_and_si128(byte2, byte2mask);
output = _mm_or_si128(output, byte2);
byte3 = _mm_and_si128(byte3, byte3mask);
output = _mm_or_si128(output, byte3);
__m128i output = _mm_shuffle_epi8(input, shufmask);
_mm_storeu_si128(reinterpret_cast<__m128i*>(&dest[i]), output);
}
for (; i < count; ++i) { // handle residual elements
@ -75,32 +100,37 @@ void copy_and_swap_32_unaligned(void* dest_ptr, const void* src_ptr,
}
}
void copy_and_swap_64_aligned(void* dest, const void* src, size_t count) {
return copy_and_swap_64_unaligned(dest, src, count);
// Byte-swaps each 64-bit element from src_ptr into dest_ptr.
// Both pointers must be 16-byte aligned; count is the number of elements.
void copy_and_swap_64_aligned(void* dest_ptr, const void* src_ptr,
                              size_t count) {
  auto dst = reinterpret_cast<uint64_t*>(dest_ptr);
  auto src = reinterpret_cast<const uint64_t*>(src_ptr);
  // pshufb control that reverses the eight bytes of every 64-bit lane.
  const __m128i reverse_mask =
      _mm_set_epi8(0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x00, 0x01,
                   0x02, 0x03, 0x04, 0x05, 0x06, 0x07);
  size_t idx = 0;
  // Vector loop: two elements (one 128-bit chunk) per iteration.
  for (; idx + 2 <= count; idx += 2) {
    const __m128i chunk =
        _mm_load_si128(reinterpret_cast<const __m128i*>(&src[idx]));
    _mm_store_si128(reinterpret_cast<__m128i*>(&dst[idx]),
                    _mm_shuffle_epi8(chunk, reverse_mask));
  }
  // Scalar tail for the (at most one) remaining element.
  for (; idx < count; ++idx) {
    dst[idx] = byte_swap(src[idx]);
  }
}
void copy_and_swap_64_unaligned(void* dest_ptr, const void* src_ptr,
size_t count) {
auto dest = reinterpret_cast<uint64_t*>(dest_ptr);
auto src = reinterpret_cast<const uint64_t*>(src_ptr);
__m128i byte2mask = _mm_set1_epi32(0x00FF0000);
__m128i byte3mask = _mm_set1_epi32(0x0000FF00);
__m128i shufmask =
_mm_set_epi8(0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x00, 0x01,
0x02, 0x03, 0x04, 0x05, 0x06, 0x07);
size_t i;
for (i = 0; i + 2 <= count; i += 2) {
__m128i input = _mm_loadu_si128(reinterpret_cast<const __m128i*>(&src[i]));
// Do the four shifts.
__m128i byte1 = _mm_slli_epi32(input, 24);
__m128i byte2 = _mm_slli_epi32(input, 8);
__m128i byte3 = _mm_srli_epi32(input, 8);
__m128i byte4 = _mm_srli_epi32(input, 24);
// OR bytes together.
__m128i output = _mm_or_si128(byte1, byte4);
byte2 = _mm_and_si128(byte2, byte2mask);
output = _mm_or_si128(output, byte2);
byte3 = _mm_and_si128(byte3, byte3mask);
output = _mm_or_si128(output, byte3);
// Reorder the two words.
output = _mm_shuffle_epi32(output, _MM_SHUFFLE(2, 3, 0, 1));
__m128i output = _mm_shuffle_epi8(input, shufmask);
_mm_storeu_si128(reinterpret_cast<__m128i*>(&dest[i]), output);
}
for (; i < count; ++i) { // handle residual elements
@ -108,8 +138,20 @@ void copy_and_swap_64_unaligned(void* dest_ptr, const void* src_ptr,
}
}
void copy_and_swap_16_in_32_aligned(void* dest, const void* src, size_t count) {
return copy_and_swap_16_in_32_unaligned(dest, src, count);
// Swaps the two 16-bit halves of each 32-bit word (aligned SSE2 fast path).
// NOTE(review): the pointers are indexed as 64-bit elements, yet the vector
// loop advances i by 4 per 128-bit load (which only covers two uint64_t), and
// the scalar tail's (x >> 16) | (x << 16) on a uint64_t moves bits across the
// 32-bit word boundary — unlike the per-32-bit-lane shifts in the SIMD loop.
// This only lines up if `count` is measured in 32-bit words; confirm the
// intended element width of `count` against the callers.
void copy_and_swap_16_in_32_aligned(void* dest_ptr, const void* src_ptr,
size_t count) {
auto dest = reinterpret_cast<uint64_t*>(dest_ptr);
auto src = reinterpret_cast<const uint64_t*>(src_ptr);
size_t i;
// Vector loop: rotate each 32-bit lane by 16 via shift+or.
for (i = 0; i + 4 <= count; i += 4) {
__m128i input = _mm_load_si128(reinterpret_cast<const __m128i*>(&src[i]));
__m128i output =
_mm_or_si128(_mm_slli_epi32(input, 16), _mm_srli_epi32(input, 16));
_mm_store_si128(reinterpret_cast<__m128i*>(&dest[i]), output);
}
for (; i < count; ++i) { // handle residual elements
dest[i] = (src[i] >> 16) | (src[i] << 16);
}
}
void copy_and_swap_16_in_32_unaligned(void* dest_ptr, const void* src_ptr,

View File

@ -1663,7 +1663,6 @@ struct LOAD_VECTOR_SHL_I8
e.shl(e.dx, 4);
e.mov(e.rax, (uintptr_t)lvsl_table);
e.vmovaps(i.dest, e.ptr[e.rax + e.rdx]);
e.ReloadMembase();
}
}
};
@ -1705,7 +1704,6 @@ struct LOAD_VECTOR_SHR_I8
e.shl(e.dx, 4);
e.mov(e.rax, (uintptr_t)lvsr_table);
e.vmovaps(i.dest, e.ptr[e.rax + e.rdx]);
e.ReloadMembase();
}
}
};
@ -2129,6 +2127,176 @@ struct STORE_MMIO_I32
};
EMITTER_OPCODE_TABLE(OPCODE_STORE_MMIO, STORE_MMIO_I32);
// ============================================================================
// OPCODE_LOAD_OFFSET
// ============================================================================
// Builds the Xbyak address expression [membase + guest + offset] for the
// LOAD_OFFSET/STORE_OFFSET sequences below.
// `offset` must be a constant operand (constant() is read unconditionally).
// Clobbers EAX/RAX as scratch on every path except the small-constant one.
template <typename T>
RegExp ComputeMemoryAddressOffset(X64Emitter& e, const T& guest,
const T& offset) {
int32_t offset_const = static_cast<int32_t>(offset.constant());
if (guest.is_constant) {
// Fully constant address: fold guest + offset at emit time.
uint32_t address = static_cast<uint32_t>(guest.constant());
address += static_cast<int32_t>(offset.constant());
if (address < 0x80000000) {
// Fits in a non-negative 32-bit displacement: encode it directly.
return e.GetMembaseReg() + address;
} else {
// A displacement >= 0x80000000 would be sign-extended by the encoder,
// so route the address through RAX instead.
e.mov(e.eax, address);
return e.GetMembaseReg() + e.rax;
}
} else {
// Clear the top 32 bits, as they are likely garbage.
// TODO(benvanik): find a way to avoid doing this.
e.mov(e.eax, guest.reg().cvt32());
return e.GetMembaseReg() + e.rax + offset_const;
}
}
// 8-bit OPCODE_LOAD_OFFSET: load a byte from [membase + src1 + src2].
// No swap path — a single byte has no byte order.
struct LOAD_OFFSET_I8
: Sequence<LOAD_OFFSET_I8, I<OPCODE_LOAD_OFFSET, I8Op, I64Op, I64Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
e.mov(i.dest, e.byte[addr]);
}
};
// 16-bit OPCODE_LOAD_OFFSET: load from [membase + src1 + src2], byte-swapping
// when the instruction carries LOAD_STORE_BYTE_SWAP.
struct LOAD_OFFSET_I16
: Sequence<LOAD_OFFSET_I16, I<OPCODE_LOAD_OFFSET, I16Op, I64Op, I64Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE loads and byte-swaps in a single instruction.
e.movbe(i.dest, e.word[addr]);
} else {
e.mov(i.dest, e.word[addr]);
// Rotating a 16-bit register by 8 exchanges its two bytes.
e.ror(i.dest, 8);
}
} else {
e.mov(i.dest, e.word[addr]);
}
}
};
// 32-bit OPCODE_LOAD_OFFSET: load from [membase + src1 + src2], byte-swapping
// when the instruction carries LOAD_STORE_BYTE_SWAP.
struct LOAD_OFFSET_I32
: Sequence<LOAD_OFFSET_I32, I<OPCODE_LOAD_OFFSET, I32Op, I64Op, I64Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE loads and byte-swaps in a single instruction.
e.movbe(i.dest, e.dword[addr]);
} else {
e.mov(i.dest, e.dword[addr]);
// Fallback: plain load followed by BSWAP.
e.bswap(i.dest);
}
} else {
e.mov(i.dest, e.dword[addr]);
}
}
};
// 64-bit OPCODE_LOAD_OFFSET: load from [membase + src1 + src2], byte-swapping
// when the instruction carries LOAD_STORE_BYTE_SWAP.
struct LOAD_OFFSET_I64
: Sequence<LOAD_OFFSET_I64, I<OPCODE_LOAD_OFFSET, I64Op, I64Op, I64Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE loads and byte-swaps in a single instruction.
e.movbe(i.dest, e.qword[addr]);
} else {
e.mov(i.dest, e.qword[addr]);
// Fallback: plain load followed by BSWAP.
e.bswap(i.dest);
}
} else {
e.mov(i.dest, e.qword[addr]);
}
}
};
EMITTER_OPCODE_TABLE(OPCODE_LOAD_OFFSET, LOAD_OFFSET_I8, LOAD_OFFSET_I16,
LOAD_OFFSET_I32, LOAD_OFFSET_I64);
// ============================================================================
// OPCODE_STORE_OFFSET
// ============================================================================
// 8-bit OPCODE_STORE_OFFSET: store src3 to [membase + src1 + src2].
// No swap path — a single byte has no byte order.
struct STORE_OFFSET_I8
: Sequence<STORE_OFFSET_I8,
I<OPCODE_STORE_OFFSET, VoidOp, I64Op, I64Op, I8Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.src3.is_constant) {
// Constant value can be stored as an immediate.
e.mov(e.byte[addr], i.src3.constant());
} else {
e.mov(e.byte[addr], i.src3);
}
}
};
// 16-bit OPCODE_STORE_OFFSET: store src3 to [membase + src1 + src2],
// byte-swapping when LOAD_STORE_BYTE_SWAP is set.
// The swapped path requires a register source and MOVBE support; the
// non-MOVBE swapped store is not implemented (assert_always).
struct STORE_OFFSET_I16
: Sequence<STORE_OFFSET_I16,
I<OPCODE_STORE_OFFSET, VoidOp, I64Op, I64Op, I16Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
assert_false(i.src3.is_constant);
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE byte-swaps and stores in a single instruction.
e.movbe(e.word[addr], i.src3);
} else {
assert_always("not implemented");
}
} else {
if (i.src3.is_constant) {
e.mov(e.word[addr], i.src3.constant());
} else {
e.mov(e.word[addr], i.src3);
}
}
}
};
// 32-bit OPCODE_STORE_OFFSET: store src3 to [membase + src1 + src2],
// byte-swapping when LOAD_STORE_BYTE_SWAP is set.
// The swapped path requires a register source and MOVBE support; the
// non-MOVBE swapped store is not implemented (assert_always).
struct STORE_OFFSET_I32
: Sequence<STORE_OFFSET_I32,
I<OPCODE_STORE_OFFSET, VoidOp, I64Op, I64Op, I32Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
assert_false(i.src3.is_constant);
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE byte-swaps and stores in a single instruction.
e.movbe(e.dword[addr], i.src3);
} else {
assert_always("not implemented");
}
} else {
if (i.src3.is_constant) {
e.mov(e.dword[addr], i.src3.constant());
} else {
e.mov(e.dword[addr], i.src3);
}
}
}
};
// 64-bit OPCODE_STORE_OFFSET: store src3 to [membase + src1 + src2],
// byte-swapping when LOAD_STORE_BYTE_SWAP is set.
// The swapped path requires a register source and MOVBE support; the
// non-MOVBE swapped store is not implemented (assert_always).
struct STORE_OFFSET_I64
: Sequence<STORE_OFFSET_I64,
I<OPCODE_STORE_OFFSET, VoidOp, I64Op, I64Op, I64Op>> {
static void Emit(X64Emitter& e, const EmitArgType& i) {
auto addr = ComputeMemoryAddressOffset(e, i.src1, i.src2);
if (i.instr->flags & LoadStoreFlags::LOAD_STORE_BYTE_SWAP) {
assert_false(i.src3.is_constant);
if (e.IsFeatureEnabled(kX64EmitMovbe)) {
// MOVBE byte-swaps and stores in a single instruction.
e.movbe(e.qword[addr], i.src3);
} else {
assert_always("not implemented");
}
} else {
if (i.src3.is_constant) {
// Helper for constant stores — presumably because x86 has no
// mov m64, imm64 encoding; confirm against MovMem64's definition.
e.MovMem64(addr, i.src3.constant());
} else {
e.mov(e.qword[addr], i.src3);
}
}
}
};
EMITTER_OPCODE_TABLE(OPCODE_STORE_OFFSET, STORE_OFFSET_I8, STORE_OFFSET_I16,
STORE_OFFSET_I32, STORE_OFFSET_I64);
// ============================================================================
// OPCODE_LOAD
// ============================================================================
@ -2139,8 +2307,13 @@ RegExp ComputeMemoryAddress(X64Emitter& e, const T& guest) {
// TODO(benvanik): figure out how to do this without a temp.
// Since the constant is often 0x8... if we tried to use that as a
// displacement it would be sign extended and mess things up.
e.mov(e.eax, static_cast<uint32_t>(guest.constant()));
return e.GetMembaseReg() + e.rax;
uint32_t address = static_cast<uint32_t>(guest.constant());
if (address < 0x80000000) {
return e.GetMembaseReg() + address;
} else {
e.mov(e.eax, address);
return e.GetMembaseReg() + e.rax;
}
} else {
// Clear the top 32 bits, as they are likely garbage.
// TODO(benvanik): find a way to avoid doing this.
@ -3863,8 +4036,6 @@ struct MUL_I8 : Sequence<MUL_I8, I<OPCODE_MUL, I8Op, I8Op, I8Op>> {
e.mov(i.dest, e.al);
}
}
e.ReloadMembase();
}
};
struct MUL_I16 : Sequence<MUL_I16, I<OPCODE_MUL, I16Op, I16Op, I16Op>> {
@ -3906,8 +4077,6 @@ struct MUL_I16 : Sequence<MUL_I16, I<OPCODE_MUL, I16Op, I16Op, I16Op>> {
e.movzx(i.dest, e.ax);
}
}
e.ReloadMembase();
}
};
struct MUL_I32 : Sequence<MUL_I32, I<OPCODE_MUL, I32Op, I32Op, I32Op>> {
@ -3950,8 +4119,6 @@ struct MUL_I32 : Sequence<MUL_I32, I<OPCODE_MUL, I32Op, I32Op, I32Op>> {
e.mov(i.dest, e.eax);
}
}
e.ReloadMembase();
}
};
struct MUL_I64 : Sequence<MUL_I64, I<OPCODE_MUL, I64Op, I64Op, I64Op>> {
@ -3993,8 +4160,6 @@ struct MUL_I64 : Sequence<MUL_I64, I<OPCODE_MUL, I64Op, I64Op, I64Op>> {
e.mov(i.dest, e.rax);
}
}
e.ReloadMembase();
}
};
struct MUL_F32 : Sequence<MUL_F32, I<OPCODE_MUL, F32Op, F32Op, F32Op>> {
@ -4072,7 +4237,6 @@ struct MUL_HI_I8 : Sequence<MUL_HI_I8, I<OPCODE_MUL_HI, I8Op, I8Op, I8Op>> {
}
e.mov(i.dest, e.ah);
}
e.ReloadMembase();
}
};
struct MUL_HI_I16
@ -4116,7 +4280,6 @@ struct MUL_HI_I16
}
e.mov(i.dest, e.dx);
}
e.ReloadMembase();
}
};
struct MUL_HI_I32
@ -4165,7 +4328,6 @@ struct MUL_HI_I32
}
e.mov(i.dest, e.edx);
}
e.ReloadMembase();
}
};
struct MUL_HI_I64
@ -4214,7 +4376,6 @@ struct MUL_HI_I64
}
e.mov(i.dest, e.rdx);
}
e.ReloadMembase();
}
};
EMITTER_OPCODE_TABLE(OPCODE_MUL_HI, MUL_HI_I8, MUL_HI_I16, MUL_HI_I32,
@ -4230,11 +4391,8 @@ struct DIV_I8 : Sequence<DIV_I8, I<OPCODE_DIV, I8Op, I8Op, I8Op>> {
Xbyak::Label skip;
e.inLocalLabel();
// NOTE: RDX clobbered.
bool clobbered_rcx = false;
if (i.src2.is_constant) {
assert_true(!i.src1.is_constant);
clobbered_rcx = true;
e.mov(e.cl, i.src2.constant());
if (i.instr->flags & ARITHMETIC_UNSIGNED) {
e.movzx(e.ax, i.src1);
@ -4268,10 +4426,6 @@ struct DIV_I8 : Sequence<DIV_I8, I<OPCODE_DIV, I8Op, I8Op, I8Op>> {
e.L(skip);
e.outLocalLabel();
e.mov(i.dest, e.al);
if (clobbered_rcx) {
e.ReloadContext();
}
e.ReloadMembase();
}
};
struct DIV_I16 : Sequence<DIV_I16, I<OPCODE_DIV, I16Op, I16Op, I16Op>> {
@ -4279,11 +4433,8 @@ struct DIV_I16 : Sequence<DIV_I16, I<OPCODE_DIV, I16Op, I16Op, I16Op>> {
Xbyak::Label skip;
e.inLocalLabel();
// NOTE: RDX clobbered.
bool clobbered_rcx = false;
if (i.src2.is_constant) {
assert_true(!i.src1.is_constant);
clobbered_rcx = true;
e.mov(e.cx, i.src2.constant());
if (i.instr->flags & ARITHMETIC_UNSIGNED) {
e.mov(e.ax, i.src1);
@ -4323,10 +4474,6 @@ struct DIV_I16 : Sequence<DIV_I16, I<OPCODE_DIV, I16Op, I16Op, I16Op>> {
e.L(skip);
e.outLocalLabel();
e.mov(i.dest, e.ax);
if (clobbered_rcx) {
e.ReloadContext();
}
e.ReloadMembase();
}
};
struct DIV_I32 : Sequence<DIV_I32, I<OPCODE_DIV, I32Op, I32Op, I32Op>> {
@ -4334,11 +4481,8 @@ struct DIV_I32 : Sequence<DIV_I32, I<OPCODE_DIV, I32Op, I32Op, I32Op>> {
Xbyak::Label skip;
e.inLocalLabel();
// NOTE: RDX clobbered.
bool clobbered_rcx = false;
if (i.src2.is_constant) {
assert_true(!i.src1.is_constant);
clobbered_rcx = true;
e.mov(e.ecx, i.src2.constant());
if (i.instr->flags & ARITHMETIC_UNSIGNED) {
e.mov(e.eax, i.src1);
@ -4378,10 +4522,6 @@ struct DIV_I32 : Sequence<DIV_I32, I<OPCODE_DIV, I32Op, I32Op, I32Op>> {
e.L(skip);
e.outLocalLabel();
e.mov(i.dest, e.eax);
if (clobbered_rcx) {
e.ReloadContext();
}
e.ReloadMembase();
}
};
struct DIV_I64 : Sequence<DIV_I64, I<OPCODE_DIV, I64Op, I64Op, I64Op>> {
@ -4389,11 +4529,8 @@ struct DIV_I64 : Sequence<DIV_I64, I<OPCODE_DIV, I64Op, I64Op, I64Op>> {
Xbyak::Label skip;
e.inLocalLabel();
// NOTE: RDX clobbered.
bool clobbered_rcx = false;
if (i.src2.is_constant) {
assert_true(!i.src1.is_constant);
clobbered_rcx = true;
e.mov(e.rcx, i.src2.constant());
if (i.instr->flags & ARITHMETIC_UNSIGNED) {
e.mov(e.rax, i.src1);
@ -4433,10 +4570,6 @@ struct DIV_I64 : Sequence<DIV_I64, I<OPCODE_DIV, I64Op, I64Op, I64Op>> {
e.L(skip);
e.outLocalLabel();
e.mov(i.dest, e.rax);
if (clobbered_rcx) {
e.ReloadContext();
}
e.ReloadMembase();
}
};
struct DIV_F32 : Sequence<DIV_F32, I<OPCODE_DIV, F32Op, F32Op, F32Op>> {
@ -5319,7 +5452,6 @@ void EmitShlXX(X64Emitter& e, const ARGS& i) {
} else {
e.mov(e.cl, src);
e.shl(dest_src, e.cl);
e.ReloadContext();
}
},
[](X64Emitter& e, const REG& dest_src, int8_t constant) {
@ -5397,7 +5529,6 @@ void EmitShrXX(X64Emitter& e, const ARGS& i) {
} else {
e.mov(e.cl, src);
e.shr(dest_src, e.cl);
e.ReloadContext();
}
},
[](X64Emitter& e, const REG& dest_src, int8_t constant) {
@ -5473,7 +5604,6 @@ void EmitSarXX(X64Emitter& e, const ARGS& i) {
} else {
e.mov(e.cl, src);
e.sar(dest_src, e.cl);
e.ReloadContext();
}
},
[](X64Emitter& e, const REG& dest_src, int8_t constant) {
@ -6088,7 +6218,6 @@ void EmitRotateLeftXX(X64Emitter& e, const ARGS& i) {
}
}
e.rol(i.dest, e.cl);
e.ReloadContext();
}
}
struct ROTATE_LEFT_I8
@ -6355,24 +6484,17 @@ struct CNTLZ_I8 : Sequence<CNTLZ_I8, I<OPCODE_CNTLZ, I8Op, I8Op>> {
e.lzcnt(i.dest.reg().cvt16(), i.dest.reg().cvt16());
e.sub(i.dest, 8);
} else {
Xbyak::Label jz, jend;
Xbyak::Label end;
e.inLocalLabel();
// BSR: searches $2 until MSB 1 found, stores idx (from bit 0) in $1
// if input is 0, results are undefined (and ZF is set)
e.bsr(i.dest, i.src1);
e.jz(jz); // Jump if zero
e.bsr(e.rax, i.src1); // ZF set if i.src1 is 0
e.mov(i.dest, 0x8);
e.jz(end);
// Invert the result (7 - i.dest)
e.xor_(i.dest, 0x7);
e.jmp(jend); // Jmp to end
e.xor_(e.rax, 0x7);
e.mov(i.dest, e.rax);
// src1 was zero, so write 8 to the dest reg
e.L(jz);
e.mov(i.dest, 8);
e.L(jend);
e.L(end);
e.outLocalLabel();
}
}
@ -6383,24 +6505,17 @@ struct CNTLZ_I16 : Sequence<CNTLZ_I16, I<OPCODE_CNTLZ, I8Op, I16Op>> {
// LZCNT: searches $2 until MSB 1 found, stores idx (from last bit) in $1
e.lzcnt(i.dest.reg().cvt32(), i.src1);
} else {
Xbyak::Label jz, jend;
Xbyak::Label end;
e.inLocalLabel();
// BSR: searches $2 until MSB 1 found, stores idx (from bit 0) in $1
// if input is 0, results are undefined (and ZF is set)
e.bsr(i.dest, i.src1);
e.jz(jz); // Jump if zero
e.bsr(e.rax, i.src1); // ZF set if i.src1 is 0
e.mov(i.dest, 0x10);
e.jz(end);
// Invert the result (15 - i.dest)
e.xor_(i.dest, 0xF);
e.jmp(jend); // Jmp to end
e.xor_(e.rax, 0x0F);
e.mov(i.dest, e.rax);
// src1 was zero, so write 16 to the dest reg
e.L(jz);
e.mov(i.dest, 16);
e.L(jend);
e.L(end);
e.outLocalLabel();
}
}
@ -6410,24 +6525,17 @@ struct CNTLZ_I32 : Sequence<CNTLZ_I32, I<OPCODE_CNTLZ, I8Op, I32Op>> {
if (e.IsFeatureEnabled(kX64EmitLZCNT)) {
e.lzcnt(i.dest.reg().cvt32(), i.src1);
} else {
Xbyak::Label jz, jend;
Xbyak::Label end;
e.inLocalLabel();
// BSR: searches $2 until MSB 1 found, stores idx (from bit 0) in $1
// if input is 0, results are undefined (and ZF is set)
e.bsr(i.dest, i.src1);
e.jz(jz); // Jump if zero
e.bsr(e.rax, i.src1); // ZF set if i.src1 is 0
e.mov(i.dest, 0x20);
e.jz(end);
// Invert the result (31 - i.dest)
e.xor_(i.dest, 0x1F);
e.jmp(jend); // Jmp to end
e.xor_(e.rax, 0x1F);
e.mov(i.dest, e.rax);
// src1 was zero, so write 32 to the dest reg
e.L(jz);
e.mov(i.dest, 32);
e.L(jend);
e.L(end);
e.outLocalLabel();
}
}
@ -6437,24 +6545,17 @@ struct CNTLZ_I64 : Sequence<CNTLZ_I64, I<OPCODE_CNTLZ, I8Op, I64Op>> {
if (e.IsFeatureEnabled(kX64EmitLZCNT)) {
e.lzcnt(i.dest.reg().cvt64(), i.src1);
} else {
Xbyak::Label jz, jend;
Xbyak::Label end;
e.inLocalLabel();
// BSR: searches $2 until MSB 1 found, stores idx (from bit 0) in $1
// if input is 0, results are undefined (and ZF is set)
e.bsr(i.dest, i.src1);
e.jz(jz); // Jump if zero
e.bsr(e.rax, i.src1); // ZF set if i.src1 is 0
e.mov(i.dest, 0x40);
e.jz(end);
// Invert the result (63 - i.dest)
e.xor_(i.dest, 0x3F);
e.jmp(jend); // Jmp to end
e.xor_(e.rax, 0x3F);
e.mov(i.dest, e.rax);
// src1 was zero, so write 64 to the dest reg
e.L(jz);
e.mov(i.dest, 64);
e.L(jend);
e.L(end);
e.outLocalLabel();
}
}
@ -6579,7 +6680,6 @@ struct EXTRACT_I32
e.vmovaps(e.xmm0, e.ptr[e.rdx + e.rax]);
e.vpshufb(e.xmm0, src1, e.xmm0);
e.vpextrd(i.dest, e.xmm0, 0);
e.ReloadMembase();
}
}
};
@ -7619,8 +7719,6 @@ struct ATOMIC_COMPARE_EXCHANGE_I32
e.lock();
e.cmpxchg(e.dword[e.GetMembaseReg() + e.rcx], i.src3);
e.sete(i.dest);
e.ReloadContext();
}
};
struct ATOMIC_COMPARE_EXCHANGE_I64
@ -7632,8 +7730,6 @@ struct ATOMIC_COMPARE_EXCHANGE_I64
e.lock();
e.cmpxchg(e.qword[e.GetMembaseReg() + e.rcx], i.src3);
e.sete(i.dest);
e.ReloadContext();
}
};
EMITTER_OPCODE_TABLE(OPCODE_ATOMIC_COMPARE_EXCHANGE,
@ -7696,6 +7792,8 @@ void RegisterSequences() {
Register_OPCODE_CONTEXT_BARRIER();
Register_OPCODE_LOAD_MMIO();
Register_OPCODE_STORE_MMIO();
Register_OPCODE_LOAD_OFFSET();
Register_OPCODE_STORE_OFFSET();
Register_OPCODE_LOAD();
Register_OPCODE_STORE();
Register_OPCODE_MEMSET();

View File

@ -195,10 +195,15 @@ bool ConstantPropagationPass::Run(HIRBuilder* builder) {
break;
case OPCODE_LOAD:
case OPCODE_LOAD_OFFSET:
if (i->src1.value->IsConstant()) {
assert_false(i->flags & LOAD_STORE_BYTE_SWAP);
auto memory = processor_->memory();
auto address = i->src1.value->constant.i32;
if (i->opcode->num == OPCODE_LOAD_OFFSET) {
address += i->src2.value->constant.i32;
}
auto mmio_range =
processor_->memory()->LookupVirtualMappedRange(address);
if (FLAGS_inline_mmio_access && mmio_range) {
@ -246,12 +251,21 @@ bool ConstantPropagationPass::Run(HIRBuilder* builder) {
}
break;
case OPCODE_STORE:
case OPCODE_STORE_OFFSET:
if (FLAGS_inline_mmio_access && i->src1.value->IsConstant()) {
auto address = i->src1.value->constant.i32;
if (i->opcode->num == OPCODE_STORE_OFFSET) {
address += i->src2.value->constant.i32;
}
auto mmio_range =
processor_->memory()->LookupVirtualMappedRange(address);
if (mmio_range) {
auto value = i->src2.value;
if (i->opcode->num == OPCODE_STORE_OFFSET) {
value = i->src3.value;
}
i->Replace(&OPCODE_STORE_MMIO_info, 0);
i->src1.offset = reinterpret_cast<uint64_t>(mmio_range);
i->src2.offset = address;

View File

@ -35,9 +35,11 @@ bool MemorySequenceCombinationPass::Run(HIRBuilder* builder) {
while (block) {
auto i = block->instr_head;
while (i) {
if (i->opcode == &OPCODE_LOAD_info) {
if (i->opcode == &OPCODE_LOAD_info ||
i->opcode == &OPCODE_LOAD_OFFSET_info) {
CombineLoadSequence(i);
} else if (i->opcode == &OPCODE_STORE_info) {
} else if (i->opcode == &OPCODE_STORE_info ||
i->opcode == &OPCODE_STORE_OFFSET_info) {
CombineStoreSequence(i);
}
i = i->next;
@ -112,6 +114,10 @@ void MemorySequenceCombinationPass::CombineStoreSequence(Instr* i) {
// store_convert v0, v1.i64, [swap|i64->i32,trunc]
auto src = i->src2.value;
if (i->opcode == &OPCODE_STORE_OFFSET_info) {
src = i->src3.value;
}
if (src->IsConstant()) {
// Constant value write - ignore.
return;
@ -135,7 +141,11 @@ void MemorySequenceCombinationPass::CombineStoreSequence(Instr* i) {
// Pull the original value (from before the byte swap).
// The byte swap itself will go away in DCE.
i->set_src2(def->src1.value);
if (i->opcode == &OPCODE_STORE_info) {
i->set_src2(def->src1.value);
} else if (i->opcode == &OPCODE_STORE_OFFSET_info) {
i->set_src3(def->src1.value);
}
// TODO(benvanik): extend/truncate.
}

View File

@ -1232,6 +1232,25 @@ void HIRBuilder::StoreMmio(cpu::MMIORange* mmio_range, uint32_t address,
i->set_src3(value);
}
// Appends an OPCODE_LOAD_OFFSET instruction that loads a value of `type`
// from guest address `address + offset`.
// `address` must satisfy ASSERT_ADDRESS_TYPE; `load_flags` carries
// load/store flags such as byte-swap.
// Returns the SSA value that will hold the loaded result.
Value* HIRBuilder::LoadOffset(Value* address, Value* offset, TypeName type,
                              uint32_t load_flags) {
  ASSERT_ADDRESS_TYPE(address);
  Instr* i = AppendInstr(OPCODE_LOAD_OFFSET_info, load_flags, AllocValue(type));
  i->set_src1(address);
  i->set_src2(offset);
  i->src3.value = nullptr;  // only two sources; nullptr over NULL (C++11+).
  return i->dest;
}
// Appends an OPCODE_STORE_OFFSET instruction that stores `value` to guest
// address `address + offset`.
// `address` must satisfy ASSERT_ADDRESS_TYPE; `store_flags` carries
// load/store flags such as byte-swap. Produces no result value.
void HIRBuilder::StoreOffset(Value* address, Value* offset, Value* value,
uint32_t store_flags) {
ASSERT_ADDRESS_TYPE(address);
Instr* i = AppendInstr(OPCODE_STORE_OFFSET_info, store_flags);
i->set_src1(address);
i->set_src2(offset);
i->set_src3(value);
}
Value* HIRBuilder::Load(Value* address, TypeName type, uint32_t load_flags) {
ASSERT_ADDRESS_TYPE(address);
Instr* i = AppendInstr(OPCODE_LOAD_info, load_flags, AllocValue(type));

View File

@ -147,6 +147,11 @@ class HIRBuilder {
Value* LoadMmio(cpu::MMIORange* mmio_range, uint32_t address, TypeName type);
void StoreMmio(cpu::MMIORange* mmio_range, uint32_t address, Value* value);
Value* LoadOffset(Value* address, Value* offset, TypeName type,
uint32_t load_flags = 0);
void StoreOffset(Value* address, Value* offset, Value* value,
uint32_t store_flags = 0);
Value* Load(Value* address, TypeName type, uint32_t load_flags = 0);
void Store(Value* address, Value* value, uint32_t store_flags = 0);
void Memset(Value* address, Value* value, Value* length);

View File

@ -152,6 +152,8 @@ enum Opcode {
OPCODE_CONTEXT_BARRIER,
OPCODE_LOAD_MMIO,
OPCODE_STORE_MMIO,
OPCODE_LOAD_OFFSET,
OPCODE_STORE_OFFSET,
OPCODE_LOAD,
OPCODE_STORE,
OPCODE_MEMSET,

View File

@ -231,6 +231,18 @@ DEFINE_OPCODE(
OPCODE_SIG_X_O_O_V,
OPCODE_FLAG_MEMORY)
DEFINE_OPCODE(
OPCODE_LOAD_OFFSET,
"load_offset",
OPCODE_SIG_V_V_V,
OPCODE_FLAG_MEMORY)
DEFINE_OPCODE(
OPCODE_STORE_OFFSET,
"store_offset",
OPCODE_SIG_X_V_V_V,
OPCODE_FLAG_MEMORY)
DEFINE_OPCODE(
OPCODE_LOAD,
"load",

View File

@ -249,22 +249,22 @@ enum class PPCRegister {
typedef struct PPCContext_s {
// Must be stored at 0x0 for now.
// TODO(benvanik): find a nice way to describe this to the JIT.
ThreadState* thread_state;
ThreadState* thread_state; // 0x0
// TODO(benvanik): this is getting nasty. Must be here.
uint8_t* virtual_membase;
uint8_t* virtual_membase; // 0x8
// Most frequently used registers first.
uint64_t lr; // Link register
uint64_t ctr; // Count register
uint64_t r[32]; // General purpose registers
double f[32]; // Floating-point registers
vec128_t v[128]; // VMX128 vector registers
uint64_t lr; // 0x10 Link register
uint64_t ctr; // 0x18 Count register
uint64_t r[32]; // 0x20 General purpose registers
double f[32]; // 0x120 Floating-point registers
vec128_t v[128]; // 0x220 VMX128 vector registers
// XER register:
// Split to make it easier to do individual updates.
uint8_t xer_ca;
uint8_t xer_ov;
uint8_t xer_so;
uint8_t xer_ca; // 0xA20
uint8_t xer_ov; // 0xA21
uint8_t xer_so; // 0xA22
// Condition registers:
// These are split to make it easier to do DCE on unused stores.
@ -279,7 +279,7 @@ typedef struct PPCContext_s {
// successfully
uint8_t cr0_so; // Summary Overflow (SO) - copy of XER[SO]
};
} cr0;
} cr0; // 0xA24
union {
uint32_t value;
struct {

View File

@ -960,7 +960,7 @@ int InstrEmit_rlwimix(PPCHIRBuilder& f, const InstrData& i) {
// RA <- r&m | (RA)&¬m
Value* v = f.LoadGPR(i.M.RT);
// (x||x)
v = f.Or(f.Shl(v, 32), f.And(v, f.LoadConstantUint64(0xFFFFFFFF)));
v = f.Or(f.Shl(v, 32), f.ZeroExtend(f.Truncate(v, INT32_TYPE), INT64_TYPE));
if (i.M.SH) {
v = f.RotateLeft(v, f.LoadConstantInt8(i.M.SH));
}
@ -984,8 +984,10 @@ int InstrEmit_rlwinmx(PPCHIRBuilder& f, const InstrData& i) {
// m <- MASK(MB+32, ME+32)
// RA <- r & m
Value* v = f.LoadGPR(i.M.RT);
// (x||x)
v = f.Or(f.Shl(v, 32), f.And(v, f.LoadConstantUint64(0xFFFFFFFF)));
v = f.Or(f.Shl(v, 32), f.ZeroExtend(f.Truncate(v, INT32_TYPE), INT64_TYPE));
// TODO(benvanik): optimize srwi
// TODO(benvanik): optimize slwi
// The compiler will generate a bunch of these for the special case of SH=0.
@ -1016,7 +1018,7 @@ int InstrEmit_rlwnmx(PPCHIRBuilder& f, const InstrData& i) {
f.And(f.Truncate(f.LoadGPR(i.M.SH), INT8_TYPE), f.LoadConstantInt8(0x1F));
Value* v = f.LoadGPR(i.M.RT);
// (x||x)
v = f.Or(f.Shl(v, 32), f.And(v, f.LoadConstantUint64(0xFFFFFFFF)));
v = f.Or(f.Shl(v, 32), f.ZeroExtend(f.Truncate(v, INT32_TYPE), INT64_TYPE));
v = f.RotateLeft(v, sh);
v = f.And(v, f.LoadConstantUint64(XEMASK(i.M.MB + 32, i.M.ME + 32)));
f.StoreGPR(i.M.RA, v);

View File

@ -63,8 +63,15 @@ int InstrEmit_lbz(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// RT <- i56.0 || MEM(EA, 1)
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.Load(ea, INT8_TYPE), INT64_TYPE);
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.LoadOffset(b, offset, INT8_TYPE), INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
return 0;
}
@ -73,10 +80,11 @@ int InstrEmit_lbzu(PPCHIRBuilder& f, const InstrData& i) {
// EA <- (RA) + EXTS(D)
// RT <- i56.0 || MEM(EA, 1)
// RA <- EA
Value* ea = CalculateEA_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.Load(ea, INT8_TYPE), INT64_TYPE);
Value* ra = f.LoadGPR(i.D.RA);
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.LoadOffset(ra, offset, INT8_TYPE), INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
StoreEA(f, i.D.RA, ea);
StoreEA(f, i.D.RA, f.Add(ra, offset));
return 0;
}
@ -111,8 +119,16 @@ int InstrEmit_lha(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// RT <- EXTS(MEM(EA, 2))
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.SignExtend(f.ByteSwap(f.Load(ea, INT16_TYPE)), INT64_TYPE);
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt =
f.SignExtend(f.ByteSwap(f.LoadOffset(b, offset, INT16_TYPE)), INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
return 0;
}
@ -121,10 +137,12 @@ int InstrEmit_lhau(PPCHIRBuilder& f, const InstrData& i) {
// EA <- (RA) + EXTS(D)
// RT <- EXTS(MEM(EA, 2))
// RA <- EA
Value* ea = CalculateEA_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.SignExtend(f.ByteSwap(f.Load(ea, INT16_TYPE)), INT64_TYPE);
Value* ra = f.LoadGPR(i.D.RA);
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt = f.SignExtend(f.ByteSwap(f.LoadOffset(ra, offset, INT16_TYPE)),
INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
StoreEA(f, i.D.RA, ea);
StoreEA(f, i.D.RA, f.Add(ra, offset));
return 0;
}
@ -159,8 +177,16 @@ int InstrEmit_lhz(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// RT <- i48.0 || MEM(EA, 2)
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.Load(ea, INT16_TYPE)), INT64_TYPE);
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt =
f.ZeroExtend(f.ByteSwap(f.LoadOffset(b, offset, INT16_TYPE)), INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
return 0;
}
@ -169,10 +195,12 @@ int InstrEmit_lhzu(PPCHIRBuilder& f, const InstrData& i) {
// EA <- (RA) + EXTS(D)
// RT <- i48.0 || MEM(EA, 2)
// RA <- EA
Value* ea = CalculateEA_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.Load(ea, INT16_TYPE)), INT64_TYPE);
Value* ra = f.LoadGPR(i.D.RA);
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.LoadOffset(ra, offset, INT16_TYPE)),
INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
StoreEA(f, i.D.RA, ea);
StoreEA(f, i.D.RA, f.Add(ra, offset));
return 0;
}
@ -207,8 +235,16 @@ int InstrEmit_lwa(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D || 00)
// RT <- EXTS(MEM(EA, 4))
Value* ea = CalculateEA_0_i(f, i.DS.RA, XEEXTS16(i.DS.DS << 2));
Value* rt = f.SignExtend(f.ByteSwap(f.Load(ea, INT32_TYPE)), INT64_TYPE);
Value* b;
if (i.DS.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.DS.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.DS.DS << 2));
Value* rt =
f.SignExtend(f.ByteSwap(f.LoadOffset(b, offset, INT32_TYPE)), INT64_TYPE);
f.StoreGPR(i.DS.RT, rt);
return 0;
}
@ -244,8 +280,16 @@ int InstrEmit_lwz(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// RT <- i32.0 || MEM(EA, 4)
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.Load(ea, INT32_TYPE)), INT64_TYPE);
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt =
f.ZeroExtend(f.ByteSwap(f.LoadOffset(b, offset, INT32_TYPE)), INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
return 0;
}
@ -254,10 +298,12 @@ int InstrEmit_lwzu(PPCHIRBuilder& f, const InstrData& i) {
// EA <- (RA) + EXTS(D)
// RT <- i32.0 || MEM(EA, 4)
// RA <- EA
Value* ea = CalculateEA_i(f, i.D.RA, XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.Load(ea, INT32_TYPE)), INT64_TYPE);
Value* ra = f.LoadGPR(i.D.RA);
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
Value* rt = f.ZeroExtend(f.ByteSwap(f.LoadOffset(ra, offset, INT32_TYPE)),
INT64_TYPE);
f.StoreGPR(i.D.RT, rt);
StoreEA(f, i.D.RA, ea);
StoreEA(f, i.D.RA, f.Add(ra, offset));
return 0;
}
@ -292,8 +338,15 @@ int InstrEmit_ld(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(DS || 0b00)
// RT <- MEM(EA, 8)
Value* ea = CalculateEA_0_i(f, i.DS.RA, XEEXTS16(i.DS.DS << 2));
Value* rt = f.ByteSwap(f.Load(ea, INT64_TYPE));
Value* b;
if (i.DS.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.DS.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.DS.DS << 2));
Value* rt = f.ByteSwap(f.LoadOffset(b, offset, INT64_TYPE));
f.StoreGPR(i.DS.RT, rt);
return 0;
}
@ -342,8 +395,15 @@ int InstrEmit_stb(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// MEM(EA, 1) <- (RS)[56:63]
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
f.Store(ea, f.Truncate(f.LoadGPR(i.D.RT), INT8_TYPE));
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
f.StoreOffset(b, offset, f.Truncate(f.LoadGPR(i.D.RT), INT8_TYPE));
return 0;
}
@ -386,8 +446,16 @@ int InstrEmit_sth(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// MEM(EA, 2) <- (RS)[48:63]
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
f.Store(ea, f.ByteSwap(f.Truncate(f.LoadGPR(i.D.RT), INT16_TYPE)));
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
f.StoreOffset(b, offset,
f.ByteSwap(f.Truncate(f.LoadGPR(i.D.RT), INT16_TYPE)));
return 0;
}
@ -430,8 +498,16 @@ int InstrEmit_stw(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(D)
// MEM(EA, 4) <- (RS)[32:63]
Value* ea = CalculateEA_0_i(f, i.D.RA, XEEXTS16(i.D.DS));
f.Store(ea, f.ByteSwap(f.Truncate(f.LoadGPR(i.D.RT), INT32_TYPE)));
Value* b;
if (i.D.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.D.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.D.DS));
f.StoreOffset(b, offset,
f.ByteSwap(f.Truncate(f.LoadGPR(i.D.RT), INT32_TYPE)));
return 0;
}
@ -474,8 +550,15 @@ int InstrEmit_std(PPCHIRBuilder& f, const InstrData& i) {
// b <- (RA)
// EA <- b + EXTS(DS || 0b00)
// MEM(EA, 8) <- (RS)
Value* ea = CalculateEA_0_i(f, i.DS.RA, XEEXTS16(i.DS.DS << 2));
f.Store(ea, f.ByteSwap(f.LoadGPR(i.DS.RT)));
Value* b;
if (i.DS.RA == 0) {
b = f.LoadZeroInt64();
} else {
b = f.LoadGPR(i.DS.RA);
}
Value* offset = f.LoadConstantInt64(XEEXTS16(i.DS.DS << 2));
f.StoreOffset(b, offset, f.ByteSwap(f.LoadGPR(i.DS.RT)));
return 0;
}

View File

@ -33,15 +33,18 @@ bool RawModule::LoadFile(uint32_t base_address, const std::wstring& path) {
// Allocate memory.
// Since we have no real heap just load it wherever.
base_address_ = base_address;
memory_->LookupHeap(base_address_)
->AllocFixed(base_address_, file_length, 0,
kMemoryAllocationReserve | kMemoryAllocationCommit,
kMemoryProtectRead | kMemoryProtectWrite);
auto heap = memory_->LookupHeap(base_address_);
if (!heap ||
!heap->AllocFixed(base_address_, file_length, 0,
kMemoryAllocationReserve | kMemoryAllocationCommit,
kMemoryProtectRead | kMemoryProtectWrite)) {
return false;
}
uint8_t* p = memory_->TranslateVirtual(base_address_);
// Read into memory.
fread(p, file_length, 1, file);
fclose(file);
// Setup debug info.

View File

@ -305,12 +305,12 @@ std::pair<VkBuffer, VkDeviceSize> BufferCache::UploadIndexBuffer(
// TODO(benvanik): memcpy then use compute shaders to swap?
if (format == IndexFormat::kInt16) {
// Endian::k8in16, swap half-words.
xe::copy_and_swap_16_aligned(transient_buffer_->host_base() + offset,
source_ptr, source_length / 2);
xe::copy_and_swap_16_unaligned(transient_buffer_->host_base() + offset,
source_ptr, source_length / 2);
} else if (format == IndexFormat::kInt32) {
// Endian::k8in32, swap words.
xe::copy_and_swap_32_aligned(transient_buffer_->host_base() + offset,
source_ptr, source_length / 4);
xe::copy_and_swap_32_unaligned(transient_buffer_->host_base() + offset,
source_ptr, source_length / 4);
}
transient_buffer_->Flush(offset, source_length);
@ -367,14 +367,11 @@ std::pair<VkBuffer, VkDeviceSize> BufferCache::UploadVertexBuffer(
// TODO(benvanik): memcpy then use compute shaders to swap?
if (endian == Endian::k8in32) {
// Endian::k8in32, swap words.
xe::copy_and_swap_32_aligned(transient_buffer_->host_base() + offset,
upload_ptr, upload_size / 4);
xe::copy_and_swap_32_unaligned(transient_buffer_->host_base() + offset,
upload_ptr, source_length / 4);
} else if (endian == Endian::k16in32) {
// TODO(DrChat): Investigate what 16-in-32 actually does.
assert_always();
xe::copy_and_swap_16_in_32_aligned(transient_buffer_->host_base() + offset,
upload_ptr, upload_size / 4);
xe::copy_and_swap_16_in_32_unaligned(
transient_buffer_->host_base() + offset, upload_ptr, source_length / 4);
} else {
assert_always();
}

View File

@ -171,12 +171,14 @@ bool Memory::Initialize() {
heaps_.vE0000000.Initialize(virtual_membase_, 0xE0000000, 0x1FD00000, 4096,
&heaps_.physical);
// Protect the first 64kb of memory.
// Protect the first and last 64kb of memory.
heaps_.v00000000.AllocFixed(
0x00000000, 64 * 1024, 64 * 1024,
0x00000000, 0x10000, 0x10000,
kMemoryAllocationReserve | kMemoryAllocationCommit,
!FLAGS_protect_zero ? kMemoryProtectRead | kMemoryProtectWrite
: kMemoryProtectNoAccess);
heaps_.physical.AllocFixed(0x1FFF0000, 0x10000, 0x10000,
kMemoryAllocationReserve, kMemoryProtectNoAccess);
// GPU writeback.
// 0xC... is physical, 0x7F... is virtual. We may need to overlay these.

File diff suppressed because it is too large Load Diff

View File

@ -137,6 +137,10 @@ struct loader_layer_properties {
struct loader_name_value enable_env_var;
uint32_t num_component_layers;
char (*component_layer_names)[MAX_STRING_SIZE];
struct {
char enumerate_instance_extension_properties[MAX_STRING_SIZE];
char enumerate_instance_layer_properties[MAX_STRING_SIZE];
} pre_instance_functions;
};
struct loader_layer_list {
@ -375,7 +379,9 @@ static inline void loader_init_dispatch(void *obj, const void *data) {
// Global variables used across files
extern struct loader_struct loader;
extern THREAD_LOCAL_DECL struct loader_instance *tls_instance;
#if defined(_WIN32) && !defined(LOADER_DYNAMIC_LIB)
extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init);
#endif
extern loader_platform_thread_mutex loader_lock;
extern loader_platform_thread_mutex loader_json_lock;
@ -437,6 +443,7 @@ void loader_scanned_icd_clear(const struct loader_instance *inst, struct loader_
VkResult loader_icd_scan(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list);
void loader_layer_scan(const struct loader_instance *inst, struct loader_layer_list *instance_layers);
void loader_implicit_layer_scan(const struct loader_instance *inst, struct loader_layer_list *instance_layers);
bool loader_is_implicit_layer_enabled(const struct loader_instance *inst, const struct loader_layer_properties *prop);
VkResult loader_get_icd_loader_instance_extensions(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list,
struct loader_extension_list *inst_exts);
struct loader_icd_term *loader_get_icd_and_device(const VkDevice device, struct loader_device **found_dev, uint32_t *icd_index);

View File

@ -26,6 +26,9 @@ project("vulkan-loader")
defines({
"VK_USE_PLATFORM_WIN32_KHR",
})
links({
"Cfgmgr32"
})
filter("platforms:not Windows")
removefiles("dirent_on_windows.c")
filter("platforms:Linux")

View File

@ -46,13 +46,8 @@ LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkI
void *addr;
addr = globalGetProcAddr(pName);
if (instance == VK_NULL_HANDLE) {
// Get entrypoint addresses that are global (no dispatchable object)
if (instance == VK_NULL_HANDLE || addr != NULL) {
return addr;
} else {
// If a global entrypoint return NULL
if (addr) return NULL;
}
struct loader_instance *ptr_instance = loader_get_instance(instance);
@ -99,123 +94,178 @@ LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDev
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName,
uint32_t *pPropertyCount,
VkExtensionProperties *pProperties) {
struct loader_extension_list *global_ext_list = NULL;
struct loader_layer_list instance_layers;
struct loader_extension_list local_ext_list;
struct loader_icd_tramp_list icd_tramp_list;
uint32_t copy_size;
VkResult res = VK_SUCCESS;
tls_instance = NULL;
memset(&local_ext_list, 0, sizeof(local_ext_list));
memset(&instance_layers, 0, sizeof(instance_layers));
loader_platform_thread_once(&once_init, loader_initialize);
LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
// Get layer libraries if needed
if (pLayerName && strlen(pLayerName) != 0) {
if (vk_string_validate(MaxLoaderStringLength, pLayerName) != VK_STRING_ERROR_NONE) {
assert(VK_FALSE &&
"vkEnumerateInstanceExtensionProperties: "
"pLayerName is too long or is badly formed");
res = VK_ERROR_EXTENSION_NOT_PRESENT;
goto out;
// We know we need to call at least the terminator
VkResult res = VK_SUCCESS;
VkEnumerateInstanceExtensionPropertiesChain chain_tail = {
.header =
{
.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES,
.version = VK_CURRENT_CHAIN_VERSION,
.size = sizeof(chain_tail),
},
.pfnNextLayer = &terminator_EnumerateInstanceExtensionProperties,
.pNextLink = NULL,
};
VkEnumerateInstanceExtensionPropertiesChain *chain_head = &chain_tail;
// Get the implicit layers
struct loader_layer_list layers;
memset(&layers, 0, sizeof(layers));
loader_implicit_layer_scan(NULL, &layers);
// We'll need to save the dl handles so we can close them later
loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
if (libs == NULL) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
size_t lib_count = 0;
// Prepend layers onto the chain if they implment this entry point
for (uint32_t i = 0; i < layers.count; ++i) {
if (!loader_is_implicit_layer_enabled(NULL, layers.list + i) ||
layers.list[i].pre_instance_functions.enumerate_instance_extension_properties[0] == '\0') {
continue;
}
loader_layer_scan(NULL, &instance_layers);
for (uint32_t i = 0; i < instance_layers.count; i++) {
struct loader_layer_properties *props = &instance_layers.list[i];
if (strcmp(props->info.layerName, pLayerName) == 0) {
global_ext_list = &props->instance_extension_list;
break;
}
}
} else {
// Scan/discover all ICD libraries
memset(&icd_tramp_list, 0, sizeof(struct loader_icd_tramp_list));
res = loader_icd_scan(NULL, &icd_tramp_list);
if (VK_SUCCESS != res) {
goto out;
}
// Get extensions from all ICD's, merge so no duplicates
res = loader_get_icd_loader_instance_extensions(NULL, &icd_tramp_list, &local_ext_list);
if (VK_SUCCESS != res) {
goto out;
}
loader_scanned_icd_clear(NULL, &icd_tramp_list);
// Append implicit layers.
loader_implicit_layer_scan(NULL, &instance_layers);
for (uint32_t i = 0; i < instance_layers.count; i++) {
struct loader_extension_list *ext_list = &instance_layers.list[i].instance_extension_list;
loader_add_to_ext_list(NULL, &local_ext_list, ext_list->count, ext_list->list);
loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
libs[lib_count++] = layer_lib;
void *pfn = loader_platform_get_proc_address(layer_lib,
layers.list[i].pre_instance_functions.enumerate_instance_extension_properties);
if (pfn == NULL) {
loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
"%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
layers.list[i].pre_instance_functions.enumerate_instance_extension_properties, layers.list[i].lib_name);
continue;
}
global_ext_list = &local_ext_list;
VkEnumerateInstanceExtensionPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceExtensionPropertiesChain));
if (chain_link == NULL) {
res = VK_ERROR_OUT_OF_HOST_MEMORY;
break;
}
chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES;
chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
chain_link->header.size = sizeof(*chain_link);
chain_link->pfnNextLayer = pfn;
chain_link->pNextLink = chain_head;
chain_head = chain_link;
}
if (global_ext_list == NULL) {
res = VK_ERROR_LAYER_NOT_PRESENT;
goto out;
// Call down the chain
if (res == VK_SUCCESS) {
res = chain_head->pfnNextLayer(chain_head->pNextLink, pLayerName, pPropertyCount, pProperties);
}
if (pProperties == NULL) {
*pPropertyCount = global_ext_list->count;
goto out;
// Free up the layers
loader_delete_layer_properties(NULL, &layers);
// Tear down the chain
while (chain_head != &chain_tail) {
VkEnumerateInstanceExtensionPropertiesChain *holder = chain_head;
chain_head = (VkEnumerateInstanceExtensionPropertiesChain *)chain_head->pNextLink;
free(holder);
}
copy_size = *pPropertyCount < global_ext_list->count ? *pPropertyCount : global_ext_list->count;
for (uint32_t i = 0; i < copy_size; i++) {
memcpy(&pProperties[i], &global_ext_list->list[i], sizeof(VkExtensionProperties));
// Close the dl handles
for (size_t i = 0; i < lib_count; ++i) {
loader_platform_close_library(libs[i]);
}
*pPropertyCount = copy_size;
free(libs);
if (copy_size < global_ext_list->count) {
res = VK_INCOMPLETE;
goto out;
}
out:
loader_destroy_generic_list(NULL, (struct loader_generic_list *)&local_ext_list);
loader_delete_layer_properties(NULL, &instance_layers);
return res;
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
VkLayerProperties *pProperties) {
VkResult result = VK_SUCCESS;
struct loader_layer_list instance_layer_list;
tls_instance = NULL;
LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
loader_platform_thread_once(&once_init, loader_initialize);
// We know we need to call at least the terminator
VkResult res = VK_SUCCESS;
VkEnumerateInstanceLayerPropertiesChain chain_tail = {
.header =
{
.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES,
.version = VK_CURRENT_CHAIN_VERSION,
.size = sizeof(chain_tail),
},
.pfnNextLayer = &terminator_EnumerateInstanceLayerProperties,
.pNextLink = NULL,
};
VkEnumerateInstanceLayerPropertiesChain *chain_head = &chain_tail;
uint32_t copy_size;
// Get the implicit layers
struct loader_layer_list layers;
memset(&layers, 0, sizeof(layers));
loader_implicit_layer_scan(NULL, &layers);
// Get layer libraries
memset(&instance_layer_list, 0, sizeof(instance_layer_list));
loader_layer_scan(NULL, &instance_layer_list);
// We'll need to save the dl handles so we can close them later
loader_platform_dl_handle *libs = malloc(sizeof(loader_platform_dl_handle) * layers.count);
if (libs == NULL) {
return VK_ERROR_OUT_OF_HOST_MEMORY;
}
size_t lib_count = 0;
if (pProperties == NULL) {
*pPropertyCount = instance_layer_list.count;
goto out;
// Prepend layers onto the chain if they implment this entry point
for (uint32_t i = 0; i < layers.count; ++i) {
if (!loader_is_implicit_layer_enabled(NULL, layers.list + i) ||
layers.list[i].pre_instance_functions.enumerate_instance_layer_properties[0] == '\0') {
continue;
}
loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
libs[lib_count++] = layer_lib;
void *pfn =
loader_platform_get_proc_address(layer_lib, layers.list[i].pre_instance_functions.enumerate_instance_layer_properties);
if (pfn == NULL) {
loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0,
"%s: Unable to resolve symbol \"%s\" in implicit layer library \"%s\"", __FUNCTION__,
layers.list[i].pre_instance_functions.enumerate_instance_layer_properties, layers.list[i].lib_name);
continue;
}
VkEnumerateInstanceLayerPropertiesChain *chain_link = malloc(sizeof(VkEnumerateInstanceLayerPropertiesChain));
if (chain_link == NULL) {
res = VK_ERROR_OUT_OF_HOST_MEMORY;
break;
}
chain_link->header.type = VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES;
chain_link->header.version = VK_CURRENT_CHAIN_VERSION;
chain_link->header.size = sizeof(*chain_link);
chain_link->pfnNextLayer = pfn;
chain_link->pNextLink = chain_head;
chain_head = chain_link;
}
copy_size = (*pPropertyCount < instance_layer_list.count) ? *pPropertyCount : instance_layer_list.count;
for (uint32_t i = 0; i < copy_size; i++) {
memcpy(&pProperties[i], &instance_layer_list.list[i].info, sizeof(VkLayerProperties));
// Call down the chain
if (res == VK_SUCCESS) {
res = chain_head->pfnNextLayer(chain_head->pNextLink, pPropertyCount, pProperties);
}
*pPropertyCount = copy_size;
// Free up the layers
loader_delete_layer_properties(NULL, &layers);
if (copy_size < instance_layer_list.count) {
result = VK_INCOMPLETE;
goto out;
// Tear down the chain
while (chain_head != &chain_tail) {
VkEnumerateInstanceLayerPropertiesChain *holder = chain_head;
chain_head = (VkEnumerateInstanceLayerPropertiesChain *)chain_head->pNextLink;
free(holder);
}
out:
// Close the dl handles
for (size_t i = 0; i < lib_count; ++i) {
loader_platform_close_library(libs[i]);
}
free(libs);
loader_delete_layer_properties(NULL, &instance_layer_list);
return result;
return res;
}
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
@ -225,7 +275,7 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCr
bool loaderLocked = false;
VkResult res = VK_ERROR_INITIALIZATION_FAILED;
loader_platform_thread_once(&once_init, loader_initialize);
LOADER_PLATFORM_THREAD_ONCE(&once_init, loader_initialize);
// Fail if the requested Vulkan apiVersion is > 1.0 since the loader only supports 1.0.
// Having pCreateInfo == NULL, pCreateInfo->pApplication == NULL, or

View File

@ -0,0 +1,873 @@
#
# Copyright (c) 2017 The Khronos Group Inc.
# Copyright (c) 2017 Valve Corporation
# Copyright (c) 2017 LunarG, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Lenny Komow <lenny@lunarg.com>
#
# This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
# creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
# VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
# jump to the next function in the call chain
.intel_syntax noprefix
.include "gen_defines.asm"
.ifdef X86_64
.macro PhysDevExtTramp num
.global vkPhysDevExtTramp\num
vkPhysDevExtTramp\num:
mov rax, [rdi]
mov rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP]
jmp [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))]
.endm
.macro PhysDevExtTermin num
.global vkPhysDevExtTermin\num
vkPhysDevExtTermin\num:
mov rax, [rdi + ICD_TERM_OFFSET_PHYS_DEV_TERM] # Store the loader_icd_term* in rax
cmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
je terminError\num # Go to the error section if it is NULL
mov rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TERM] # Load the unwrapped VkPhysicalDevice into the first arg
jmp [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))] # Jump to the next function in the chain
terminError\num:
sub rsp, 56 # Create the stack frame
mov rdi, [rax + INSTANCE_OFFSET_ICD_TERM] # Load the loader_instance into rdi (first arg)
mov r8, [rdi + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Load the func name into r8 (fifth arg)
lea rcx, termin_error_string@GOTPCREL # Load the error string into rcx (fourth arg)
xor edx, edx # Set rdx to zero (third arg)
lea esi, [rdx + VK_DEBUG_REPORT_ERROR_BIT_EXT] # Write the error logging bit to rsi (second arg)
call loader_log # Log the error message before we crash
add rsp, 56 # Clean up the stack frame
mov rax, 0
jmp rax # Crash intentionally by jumping to address zero
.endm
.macro DevExtTramp num
.global vkdev_ext\num
vkdev_ext\num:
mov rax, [rdi] # Dereference the handle to get the dispatch table
jmp [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))] # Jump to the appropriate call chain
.endm
.else
.macro PhysDevExtTramp num
.global vkPhysDevExtTramp\num
vkPhysDevExtTramp\num:
mov eax, [esp + 4] # Load the wrapped VkPhysicalDevice into eax
mov ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] # Load the unwrapped VkPhysicalDevice into ecx
mov [esp + 4], ecx # Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
mov eax, [eax] # Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
jmp [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))] # Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
.endm
.macro PhysDevExtTermin num
.global vkPhysDevExtTermin\num
vkPhysDevExtTermin\num:
mov ecx, [esp + 4] # Move the wrapped VkPhysicalDevice into ecx
mov eax, [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM] # Store the loader_icd_term* in eax
cmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
je terminError\num # Go to the error section if it is NULL
mov ecx, [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] # Unwrap the VkPhysicalDevice in ecx
mov [esp + 4], ecx # Copy the unwrapped VkPhysicalDevice into the first arg
jmp [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))] # Jump to the next function in the chain
terminError\num:
mov eax, [eax + INSTANCE_OFFSET_ICD_TERM] # Load the loader_instance into eax
push [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Push the func name (fifth arg)
push offset termin_error_string@GOT # Push the error string (fourth arg)
push 0 # Push zero (third arg)
push VK_DEBUG_REPORT_ERROR_BIT_EXT # Push the error logging bit (second arg)
push eax # Push the loader_instance (first arg)
call loader_log # Log the error message before we crash
add esp, 20 # Clean up the args
mov eax, 0
jmp eax # Crash intentionally by jumping to address zero
.endm
.macro DevExtTramp num
.global vkdev_ext\num
vkdev_ext\num:
mov eax, [esp + 4] # Dereference the handle to get the dispatch table
jmp [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))] # Jump to the appropriate call chain
.endm
.endif
#if defined(__ELF__)
.section .note.GNU-stack,"",%progbits
#endif
.data
termin_error_string:
.string "Extension %s not supported for this physical device"
.text
PhysDevExtTramp 0
PhysDevExtTramp 1
PhysDevExtTramp 2
PhysDevExtTramp 3
PhysDevExtTramp 4
PhysDevExtTramp 5
PhysDevExtTramp 6
PhysDevExtTramp 7
PhysDevExtTramp 8
PhysDevExtTramp 9
PhysDevExtTramp 10
PhysDevExtTramp 11
PhysDevExtTramp 12
PhysDevExtTramp 13
PhysDevExtTramp 14
PhysDevExtTramp 15
PhysDevExtTramp 16
PhysDevExtTramp 17
PhysDevExtTramp 18
PhysDevExtTramp 19
PhysDevExtTramp 20
PhysDevExtTramp 21
PhysDevExtTramp 22
PhysDevExtTramp 23
PhysDevExtTramp 24
PhysDevExtTramp 25
PhysDevExtTramp 26
PhysDevExtTramp 27
PhysDevExtTramp 28
PhysDevExtTramp 29
PhysDevExtTramp 30
PhysDevExtTramp 31
PhysDevExtTramp 32
PhysDevExtTramp 33
PhysDevExtTramp 34
PhysDevExtTramp 35
PhysDevExtTramp 36
PhysDevExtTramp 37
PhysDevExtTramp 38
PhysDevExtTramp 39
PhysDevExtTramp 40
PhysDevExtTramp 41
PhysDevExtTramp 42
PhysDevExtTramp 43
PhysDevExtTramp 44
PhysDevExtTramp 45
PhysDevExtTramp 46
PhysDevExtTramp 47
PhysDevExtTramp 48
PhysDevExtTramp 49
PhysDevExtTramp 50
PhysDevExtTramp 51
PhysDevExtTramp 52
PhysDevExtTramp 53
PhysDevExtTramp 54
PhysDevExtTramp 55
PhysDevExtTramp 56
PhysDevExtTramp 57
PhysDevExtTramp 58
PhysDevExtTramp 59
PhysDevExtTramp 60
PhysDevExtTramp 61
PhysDevExtTramp 62
PhysDevExtTramp 63
PhysDevExtTramp 64
PhysDevExtTramp 65
PhysDevExtTramp 66
PhysDevExtTramp 67
PhysDevExtTramp 68
PhysDevExtTramp 69
PhysDevExtTramp 70
PhysDevExtTramp 71
PhysDevExtTramp 72
PhysDevExtTramp 73
PhysDevExtTramp 74
PhysDevExtTramp 75
PhysDevExtTramp 76
PhysDevExtTramp 77
PhysDevExtTramp 78
PhysDevExtTramp 79
PhysDevExtTramp 80
PhysDevExtTramp 81
PhysDevExtTramp 82
PhysDevExtTramp 83
PhysDevExtTramp 84
PhysDevExtTramp 85
PhysDevExtTramp 86
PhysDevExtTramp 87
PhysDevExtTramp 88
PhysDevExtTramp 89
PhysDevExtTramp 90
PhysDevExtTramp 91
PhysDevExtTramp 92
PhysDevExtTramp 93
PhysDevExtTramp 94
PhysDevExtTramp 95
PhysDevExtTramp 96
PhysDevExtTramp 97
PhysDevExtTramp 98
PhysDevExtTramp 99
PhysDevExtTramp 100
PhysDevExtTramp 101
PhysDevExtTramp 102
PhysDevExtTramp 103
PhysDevExtTramp 104
PhysDevExtTramp 105
PhysDevExtTramp 106
PhysDevExtTramp 107
PhysDevExtTramp 108
PhysDevExtTramp 109
PhysDevExtTramp 110
PhysDevExtTramp 111
PhysDevExtTramp 112
PhysDevExtTramp 113
PhysDevExtTramp 114
PhysDevExtTramp 115
PhysDevExtTramp 116
PhysDevExtTramp 117
PhysDevExtTramp 118
PhysDevExtTramp 119
PhysDevExtTramp 120
PhysDevExtTramp 121
PhysDevExtTramp 122
PhysDevExtTramp 123
PhysDevExtTramp 124
PhysDevExtTramp 125
PhysDevExtTramp 126
PhysDevExtTramp 127
PhysDevExtTramp 128
PhysDevExtTramp 129
PhysDevExtTramp 130
PhysDevExtTramp 131
PhysDevExtTramp 132
PhysDevExtTramp 133
PhysDevExtTramp 134
PhysDevExtTramp 135
PhysDevExtTramp 136
PhysDevExtTramp 137
PhysDevExtTramp 138
PhysDevExtTramp 139
PhysDevExtTramp 140
PhysDevExtTramp 141
PhysDevExtTramp 142
PhysDevExtTramp 143
PhysDevExtTramp 144
PhysDevExtTramp 145
PhysDevExtTramp 146
PhysDevExtTramp 147
PhysDevExtTramp 148
PhysDevExtTramp 149
PhysDevExtTramp 150
PhysDevExtTramp 151
PhysDevExtTramp 152
PhysDevExtTramp 153
PhysDevExtTramp 154
PhysDevExtTramp 155
PhysDevExtTramp 156
PhysDevExtTramp 157
PhysDevExtTramp 158
PhysDevExtTramp 159
PhysDevExtTramp 160
PhysDevExtTramp 161
PhysDevExtTramp 162
PhysDevExtTramp 163
PhysDevExtTramp 164
PhysDevExtTramp 165
PhysDevExtTramp 166
PhysDevExtTramp 167
PhysDevExtTramp 168
PhysDevExtTramp 169
PhysDevExtTramp 170
PhysDevExtTramp 171
PhysDevExtTramp 172
PhysDevExtTramp 173
PhysDevExtTramp 174
PhysDevExtTramp 175
PhysDevExtTramp 176
PhysDevExtTramp 177
PhysDevExtTramp 178
PhysDevExtTramp 179
PhysDevExtTramp 180
PhysDevExtTramp 181
PhysDevExtTramp 182
PhysDevExtTramp 183
PhysDevExtTramp 184
PhysDevExtTramp 185
PhysDevExtTramp 186
PhysDevExtTramp 187
PhysDevExtTramp 188
PhysDevExtTramp 189
PhysDevExtTramp 190
PhysDevExtTramp 191
PhysDevExtTramp 192
PhysDevExtTramp 193
PhysDevExtTramp 194
PhysDevExtTramp 195
PhysDevExtTramp 196
PhysDevExtTramp 197
PhysDevExtTramp 198
PhysDevExtTramp 199
PhysDevExtTramp 200
PhysDevExtTramp 201
PhysDevExtTramp 202
PhysDevExtTramp 203
PhysDevExtTramp 204
PhysDevExtTramp 205
PhysDevExtTramp 206
PhysDevExtTramp 207
PhysDevExtTramp 208
PhysDevExtTramp 209
PhysDevExtTramp 210
PhysDevExtTramp 211
PhysDevExtTramp 212
PhysDevExtTramp 213
PhysDevExtTramp 214
PhysDevExtTramp 215
PhysDevExtTramp 216
PhysDevExtTramp 217
PhysDevExtTramp 218
PhysDevExtTramp 219
PhysDevExtTramp 220
PhysDevExtTramp 221
PhysDevExtTramp 222
PhysDevExtTramp 223
PhysDevExtTramp 224
PhysDevExtTramp 225
PhysDevExtTramp 226
PhysDevExtTramp 227
PhysDevExtTramp 228
PhysDevExtTramp 229
PhysDevExtTramp 230
PhysDevExtTramp 231
PhysDevExtTramp 232
PhysDevExtTramp 233
PhysDevExtTramp 234
PhysDevExtTramp 235
PhysDevExtTramp 236
PhysDevExtTramp 237
PhysDevExtTramp 238
PhysDevExtTramp 239
PhysDevExtTramp 240
PhysDevExtTramp 241
PhysDevExtTramp 242
PhysDevExtTramp 243
PhysDevExtTramp 244
PhysDevExtTramp 245
PhysDevExtTramp 246
PhysDevExtTramp 247
PhysDevExtTramp 248
PhysDevExtTramp 249
PhysDevExtTermin 0
PhysDevExtTermin 1
PhysDevExtTermin 2
PhysDevExtTermin 3
PhysDevExtTermin 4
PhysDevExtTermin 5
PhysDevExtTermin 6
PhysDevExtTermin 7
PhysDevExtTermin 8
PhysDevExtTermin 9
PhysDevExtTermin 10
PhysDevExtTermin 11
PhysDevExtTermin 12
PhysDevExtTermin 13
PhysDevExtTermin 14
PhysDevExtTermin 15
PhysDevExtTermin 16
PhysDevExtTermin 17
PhysDevExtTermin 18
PhysDevExtTermin 19
PhysDevExtTermin 20
PhysDevExtTermin 21
PhysDevExtTermin 22
PhysDevExtTermin 23
PhysDevExtTermin 24
PhysDevExtTermin 25
PhysDevExtTermin 26
PhysDevExtTermin 27
PhysDevExtTermin 28
PhysDevExtTermin 29
PhysDevExtTermin 30
PhysDevExtTermin 31
PhysDevExtTermin 32
PhysDevExtTermin 33
PhysDevExtTermin 34
PhysDevExtTermin 35
PhysDevExtTermin 36
PhysDevExtTermin 37
PhysDevExtTermin 38
PhysDevExtTermin 39
PhysDevExtTermin 40
PhysDevExtTermin 41
PhysDevExtTermin 42
PhysDevExtTermin 43
PhysDevExtTermin 44
PhysDevExtTermin 45
PhysDevExtTermin 46
PhysDevExtTermin 47
PhysDevExtTermin 48
PhysDevExtTermin 49
PhysDevExtTermin 50
PhysDevExtTermin 51
PhysDevExtTermin 52
PhysDevExtTermin 53
PhysDevExtTermin 54
PhysDevExtTermin 55
PhysDevExtTermin 56
PhysDevExtTermin 57
PhysDevExtTermin 58
PhysDevExtTermin 59
PhysDevExtTermin 60
PhysDevExtTermin 61
PhysDevExtTermin 62
PhysDevExtTermin 63
PhysDevExtTermin 64
PhysDevExtTermin 65
PhysDevExtTermin 66
PhysDevExtTermin 67
PhysDevExtTermin 68
PhysDevExtTermin 69
PhysDevExtTermin 70
PhysDevExtTermin 71
PhysDevExtTermin 72
PhysDevExtTermin 73
PhysDevExtTermin 74
PhysDevExtTermin 75
PhysDevExtTermin 76
PhysDevExtTermin 77
PhysDevExtTermin 78
PhysDevExtTermin 79
PhysDevExtTermin 80
PhysDevExtTermin 81
PhysDevExtTermin 82
PhysDevExtTermin 83
PhysDevExtTermin 84
PhysDevExtTermin 85
PhysDevExtTermin 86
PhysDevExtTermin 87
PhysDevExtTermin 88
PhysDevExtTermin 89
PhysDevExtTermin 90
PhysDevExtTermin 91
PhysDevExtTermin 92
PhysDevExtTermin 93
PhysDevExtTermin 94
PhysDevExtTermin 95
PhysDevExtTermin 96
PhysDevExtTermin 97
PhysDevExtTermin 98
PhysDevExtTermin 99
PhysDevExtTermin 100
PhysDevExtTermin 101
PhysDevExtTermin 102
PhysDevExtTermin 103
PhysDevExtTermin 104
PhysDevExtTermin 105
PhysDevExtTermin 106
PhysDevExtTermin 107
PhysDevExtTermin 108
PhysDevExtTermin 109
PhysDevExtTermin 110
PhysDevExtTermin 111
PhysDevExtTermin 112
PhysDevExtTermin 113
PhysDevExtTermin 114
PhysDevExtTermin 115
PhysDevExtTermin 116
PhysDevExtTermin 117
PhysDevExtTermin 118
PhysDevExtTermin 119
PhysDevExtTermin 120
PhysDevExtTermin 121
PhysDevExtTermin 122
PhysDevExtTermin 123
PhysDevExtTermin 124
PhysDevExtTermin 125
PhysDevExtTermin 126
PhysDevExtTermin 127
PhysDevExtTermin 128
PhysDevExtTermin 129
PhysDevExtTermin 130
PhysDevExtTermin 131
PhysDevExtTermin 132
PhysDevExtTermin 133
PhysDevExtTermin 134
PhysDevExtTermin 135
PhysDevExtTermin 136
PhysDevExtTermin 137
PhysDevExtTermin 138
PhysDevExtTermin 139
PhysDevExtTermin 140
PhysDevExtTermin 141
PhysDevExtTermin 142
PhysDevExtTermin 143
PhysDevExtTermin 144
PhysDevExtTermin 145
PhysDevExtTermin 146
PhysDevExtTermin 147
PhysDevExtTermin 148
PhysDevExtTermin 149
PhysDevExtTermin 150
PhysDevExtTermin 151
PhysDevExtTermin 152
PhysDevExtTermin 153
PhysDevExtTermin 154
PhysDevExtTermin 155
PhysDevExtTermin 156
PhysDevExtTermin 157
PhysDevExtTermin 158
PhysDevExtTermin 159
PhysDevExtTermin 160
PhysDevExtTermin 161
PhysDevExtTermin 162
PhysDevExtTermin 163
PhysDevExtTermin 164
PhysDevExtTermin 165
PhysDevExtTermin 166
PhysDevExtTermin 167
PhysDevExtTermin 168
PhysDevExtTermin 169
PhysDevExtTermin 170
PhysDevExtTermin 171
PhysDevExtTermin 172
PhysDevExtTermin 173
PhysDevExtTermin 174
PhysDevExtTermin 175
PhysDevExtTermin 176
PhysDevExtTermin 177
PhysDevExtTermin 178
PhysDevExtTermin 179
PhysDevExtTermin 180
PhysDevExtTermin 181
PhysDevExtTermin 182
PhysDevExtTermin 183
PhysDevExtTermin 184
PhysDevExtTermin 185
PhysDevExtTermin 186
PhysDevExtTermin 187
PhysDevExtTermin 188
PhysDevExtTermin 189
PhysDevExtTermin 190
PhysDevExtTermin 191
PhysDevExtTermin 192
PhysDevExtTermin 193
PhysDevExtTermin 194
PhysDevExtTermin 195
PhysDevExtTermin 196
PhysDevExtTermin 197
PhysDevExtTermin 198
PhysDevExtTermin 199
PhysDevExtTermin 200
PhysDevExtTermin 201
PhysDevExtTermin 202
PhysDevExtTermin 203
PhysDevExtTermin 204
PhysDevExtTermin 205
PhysDevExtTermin 206
PhysDevExtTermin 207
PhysDevExtTermin 208
PhysDevExtTermin 209
PhysDevExtTermin 210
PhysDevExtTermin 211
PhysDevExtTermin 212
PhysDevExtTermin 213
PhysDevExtTermin 214
PhysDevExtTermin 215
PhysDevExtTermin 216
PhysDevExtTermin 217
PhysDevExtTermin 218
PhysDevExtTermin 219
PhysDevExtTermin 220
PhysDevExtTermin 221
PhysDevExtTermin 222
PhysDevExtTermin 223
PhysDevExtTermin 224
PhysDevExtTermin 225
PhysDevExtTermin 226
PhysDevExtTermin 227
PhysDevExtTermin 228
PhysDevExtTermin 229
PhysDevExtTermin 230
PhysDevExtTermin 231
PhysDevExtTermin 232
PhysDevExtTermin 233
PhysDevExtTermin 234
PhysDevExtTermin 235
PhysDevExtTermin 236
PhysDevExtTermin 237
PhysDevExtTermin 238
PhysDevExtTermin 239
PhysDevExtTermin 240
PhysDevExtTermin 241
PhysDevExtTermin 242
PhysDevExtTermin 243
PhysDevExtTermin 244
PhysDevExtTermin 245
PhysDevExtTermin 246
PhysDevExtTermin 247
PhysDevExtTermin 248
PhysDevExtTermin 249
DevExtTramp 0
DevExtTramp 1
DevExtTramp 2
DevExtTramp 3
DevExtTramp 4
DevExtTramp 5
DevExtTramp 6
DevExtTramp 7
DevExtTramp 8
DevExtTramp 9
DevExtTramp 10
DevExtTramp 11
DevExtTramp 12
DevExtTramp 13
DevExtTramp 14
DevExtTramp 15
DevExtTramp 16
DevExtTramp 17
DevExtTramp 18
DevExtTramp 19
DevExtTramp 20
DevExtTramp 21
DevExtTramp 22
DevExtTramp 23
DevExtTramp 24
DevExtTramp 25
DevExtTramp 26
DevExtTramp 27
DevExtTramp 28
DevExtTramp 29
DevExtTramp 30
DevExtTramp 31
DevExtTramp 32
DevExtTramp 33
DevExtTramp 34
DevExtTramp 35
DevExtTramp 36
DevExtTramp 37
DevExtTramp 38
DevExtTramp 39
DevExtTramp 40
DevExtTramp 41
DevExtTramp 42
DevExtTramp 43
DevExtTramp 44
DevExtTramp 45
DevExtTramp 46
DevExtTramp 47
DevExtTramp 48
DevExtTramp 49
DevExtTramp 50
DevExtTramp 51
DevExtTramp 52
DevExtTramp 53
DevExtTramp 54
DevExtTramp 55
DevExtTramp 56
DevExtTramp 57
DevExtTramp 58
DevExtTramp 59
DevExtTramp 60
DevExtTramp 61
DevExtTramp 62
DevExtTramp 63
DevExtTramp 64
DevExtTramp 65
DevExtTramp 66
DevExtTramp 67
DevExtTramp 68
DevExtTramp 69
DevExtTramp 70
DevExtTramp 71
DevExtTramp 72
DevExtTramp 73
DevExtTramp 74
DevExtTramp 75
DevExtTramp 76
DevExtTramp 77
DevExtTramp 78
DevExtTramp 79
DevExtTramp 80
DevExtTramp 81
DevExtTramp 82
DevExtTramp 83
DevExtTramp 84
DevExtTramp 85
DevExtTramp 86
DevExtTramp 87
DevExtTramp 88
DevExtTramp 89
DevExtTramp 90
DevExtTramp 91
DevExtTramp 92
DevExtTramp 93
DevExtTramp 94
DevExtTramp 95
DevExtTramp 96
DevExtTramp 97
DevExtTramp 98
DevExtTramp 99
DevExtTramp 100
DevExtTramp 101
DevExtTramp 102
DevExtTramp 103
DevExtTramp 104
DevExtTramp 105
DevExtTramp 106
DevExtTramp 107
DevExtTramp 108
DevExtTramp 109
DevExtTramp 110
DevExtTramp 111
DevExtTramp 112
DevExtTramp 113
DevExtTramp 114
DevExtTramp 115
DevExtTramp 116
DevExtTramp 117
DevExtTramp 118
DevExtTramp 119
DevExtTramp 120
DevExtTramp 121
DevExtTramp 122
DevExtTramp 123
DevExtTramp 124
DevExtTramp 125
DevExtTramp 126
DevExtTramp 127
DevExtTramp 128
DevExtTramp 129
DevExtTramp 130
DevExtTramp 131
DevExtTramp 132
DevExtTramp 133
DevExtTramp 134
DevExtTramp 135
DevExtTramp 136
DevExtTramp 137
DevExtTramp 138
DevExtTramp 139
DevExtTramp 140
DevExtTramp 141
DevExtTramp 142
DevExtTramp 143
DevExtTramp 144
DevExtTramp 145
DevExtTramp 146
DevExtTramp 147
DevExtTramp 148
DevExtTramp 149
DevExtTramp 150
DevExtTramp 151
DevExtTramp 152
DevExtTramp 153
DevExtTramp 154
DevExtTramp 155
DevExtTramp 156
DevExtTramp 157
DevExtTramp 158
DevExtTramp 159
DevExtTramp 160
DevExtTramp 161
DevExtTramp 162
DevExtTramp 163
DevExtTramp 164
DevExtTramp 165
DevExtTramp 166
DevExtTramp 167
DevExtTramp 168
DevExtTramp 169
DevExtTramp 170
DevExtTramp 171
DevExtTramp 172
DevExtTramp 173
DevExtTramp 174
DevExtTramp 175
DevExtTramp 176
DevExtTramp 177
DevExtTramp 178
DevExtTramp 179
DevExtTramp 180
DevExtTramp 181
DevExtTramp 182
DevExtTramp 183
DevExtTramp 184
DevExtTramp 185
DevExtTramp 186
DevExtTramp 187
DevExtTramp 188
DevExtTramp 189
DevExtTramp 190
DevExtTramp 191
DevExtTramp 192
DevExtTramp 193
DevExtTramp 194
DevExtTramp 195
DevExtTramp 196
DevExtTramp 197
DevExtTramp 198
DevExtTramp 199
DevExtTramp 200
DevExtTramp 201
DevExtTramp 202
DevExtTramp 203
DevExtTramp 204
DevExtTramp 205
DevExtTramp 206
DevExtTramp 207
DevExtTramp 208
DevExtTramp 209
DevExtTramp 210
DevExtTramp 211
DevExtTramp 212
DevExtTramp 213
DevExtTramp 214
DevExtTramp 215
DevExtTramp 216
DevExtTramp 217
DevExtTramp 218
DevExtTramp 219
DevExtTramp 220
DevExtTramp 221
DevExtTramp 222
DevExtTramp 223
DevExtTramp 224
DevExtTramp 225
DevExtTramp 226
DevExtTramp 227
DevExtTramp 228
DevExtTramp 229
DevExtTramp 230
DevExtTramp 231
DevExtTramp 232
DevExtTramp 233
DevExtTramp 234
DevExtTramp 235
DevExtTramp 236
DevExtTramp 237
DevExtTramp 238
DevExtTramp 239
DevExtTramp 240
DevExtTramp 241
DevExtTramp 242
DevExtTramp 243
DevExtTramp 244
DevExtTramp 245
DevExtTramp 246
DevExtTramp 247
DevExtTramp 248
DevExtTramp 249

View File

@ -0,0 +1,883 @@
;
; Copyright (c) 2017 The Khronos Group Inc.
; Copyright (c) 2017 Valve Corporation
; Copyright (c) 2017 LunarG, Inc.
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
; Author: Lenny Komow <lenny@lunarg.com>
;
; This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
; creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
; VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
; jump to the next function in the call chain
; Codegen defines a number of values, chiefly offsets of members within structs and sizes of data types within gen_defines.asm.
; Struct member offsets are defined in the format "XX_OFFSET_YY" where XX indicates the member within the struct and YY indicates
; the struct type that it is a member of. Data type sizes are defined in the format "XX_SIZE" where XX indicates the data type.
INCLUDE gen_defines.asm
; 64-bit values and macro
IFDEF rax
PhysDevExtTramp macro num:req
public vkPhysDevExtTramp&num&
vkPhysDevExtTramp&num&:
mov rax, qword ptr [rcx] ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in rax
mov rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] ; Load the unwrapped VkPhysicalDevice into rcx
jmp qword ptr [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args in other registers
endm
PhysDevExtTermin macro num
public vkPhysDevExtTermin&num&
vkPhysDevExtTermin&num&:
mov rax, qword ptr [rcx + ICD_TERM_OFFSET_PHYS_DEV_TERM] ; Store the loader_icd_term* in rax
cmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0 ; Check if the next function in the chain is NULL
je terminError&num& ; Go to the error section if it is NULL
mov rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] ; Load the unwrapped VkPhysicalDevice into the first arg
jmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))] ; Jump to the next function in the chain
terminError&num&:
sub rsp, 56 ; Create the stack frame
mov rcx, qword ptr [rax + INSTANCE_OFFSET_ICD_TERM] ; Load the loader_instance into rcx (first arg)
mov rax, qword ptr [rcx + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Load the func name into rax
lea r9, termin_error_string ; Load the error string into r9 (fourth arg)
xor r8d, r8d ; Set r8 to zero (third arg)
mov qword ptr [rsp + 32], rax ; Move the func name onto the stack (fifth arg)
lea edx, [r8 + VK_DEBUG_REPORT_ERROR_BIT_EXT] ; Write the error logging bit to rdx (second arg)
call loader_log ; Log the error message before we crash
add rsp, 56 ; Clean up the stack frame
mov rax, 0
jmp rax ; Crash intentionally by jumping to address zero
endm
DevExtTramp macro num
public vkdev_ext&num&
vkdev_ext&num&:
mov rax, qword ptr [rcx] ; Dereference the handle to get the dispatch table
jmp qword ptr [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))] ; Jump to the appropriate call chain
endm
; 32-bit values and macro
ELSE
PhysDevExtTramp macro num
public _vkPhysDevExtTramp&num&@4
_vkPhysDevExtTramp&num&@4:
mov eax, dword ptr [esp + 4] ; Load the wrapped VkPhysicalDevice into eax
mov ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] ; Load the unwrapped VkPhysicalDevice into ecx
mov [esp + 4], ecx ; Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
mov eax, [eax] ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
jmp dword ptr [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args on the stack
endm
PhysDevExtTermin macro num
public _vkPhysDevExtTermin&num&@4
_vkPhysDevExtTermin&num&@4:
mov ecx, dword ptr [esp + 4] ; Move the wrapped VkPhysicalDevice into ecx
mov eax, dword ptr [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM] ; Store the loader_icd_term* in eax
cmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0 ; Check if the next function in the chain is NULL
je terminError&num& ; Go to the error section if it is NULL
mov ecx, dword ptr [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] ; Unwrap the VkPhysicalDevice in ecx
mov dword ptr [esp + 4], ecx ; Copy the unwrapped VkPhysicalDevice into the first arg
jmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))] ; Jump to the next function in the chain
terminError&num&:
mov eax, dword ptr [eax + INSTANCE_OFFSET_ICD_TERM] ; Load the loader_instance into eax
push dword ptr [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Push the func name (fifth arg)
push offset termin_error_string ; Push the error string (fourth arg)
push 0 ; Push zero (third arg)
push VK_DEBUG_REPORT_ERROR_BIT_EXT ; Push the error logging bit (second arg)
push eax ; Push the loader_instance (first arg)
call _loader_log ; Log the error message before we crash
add esp, 20 ; Clean up the args
mov eax, 0
jmp eax ; Crash intentionally by jumping to address zero
endm
DevExtTramp macro num
public _vkdev_ext&num&@4
_vkdev_ext&num&@4:
mov eax, dword ptr [esp + 4] ; Dereference the handle to get the dispatch table
jmp dword ptr [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))] ; Jump to the appropriate call chain
endm
; This is also needed for 32-bit only
.model flat
ENDIF
.const
termin_error_string db 'Extension %s not supported for this physical device', 0
.code
IFDEF rax
extrn loader_log:near
ELSE
extrn _loader_log:near
ENDIF
PhysDevExtTramp 0
PhysDevExtTramp 1
PhysDevExtTramp 2
PhysDevExtTramp 3
PhysDevExtTramp 4
PhysDevExtTramp 5
PhysDevExtTramp 6
PhysDevExtTramp 7
PhysDevExtTramp 8
PhysDevExtTramp 9
PhysDevExtTramp 10
PhysDevExtTramp 11
PhysDevExtTramp 12
PhysDevExtTramp 13
PhysDevExtTramp 14
PhysDevExtTramp 15
PhysDevExtTramp 16
PhysDevExtTramp 17
PhysDevExtTramp 18
PhysDevExtTramp 19
PhysDevExtTramp 20
PhysDevExtTramp 21
PhysDevExtTramp 22
PhysDevExtTramp 23
PhysDevExtTramp 24
PhysDevExtTramp 25
PhysDevExtTramp 26
PhysDevExtTramp 27
PhysDevExtTramp 28
PhysDevExtTramp 29
PhysDevExtTramp 30
PhysDevExtTramp 31
PhysDevExtTramp 32
PhysDevExtTramp 33
PhysDevExtTramp 34
PhysDevExtTramp 35
PhysDevExtTramp 36
PhysDevExtTramp 37
PhysDevExtTramp 38
PhysDevExtTramp 39
PhysDevExtTramp 40
PhysDevExtTramp 41
PhysDevExtTramp 42
PhysDevExtTramp 43
PhysDevExtTramp 44
PhysDevExtTramp 45
PhysDevExtTramp 46
PhysDevExtTramp 47
PhysDevExtTramp 48
PhysDevExtTramp 49
PhysDevExtTramp 50
PhysDevExtTramp 51
PhysDevExtTramp 52
PhysDevExtTramp 53
PhysDevExtTramp 54
PhysDevExtTramp 55
PhysDevExtTramp 56
PhysDevExtTramp 57
PhysDevExtTramp 58
PhysDevExtTramp 59
PhysDevExtTramp 60
PhysDevExtTramp 61
PhysDevExtTramp 62
PhysDevExtTramp 63
PhysDevExtTramp 64
PhysDevExtTramp 65
PhysDevExtTramp 66
PhysDevExtTramp 67
PhysDevExtTramp 68
PhysDevExtTramp 69
PhysDevExtTramp 70
PhysDevExtTramp 71
PhysDevExtTramp 72
PhysDevExtTramp 73
PhysDevExtTramp 74
PhysDevExtTramp 75
PhysDevExtTramp 76
PhysDevExtTramp 77
PhysDevExtTramp 78
PhysDevExtTramp 79
PhysDevExtTramp 80
PhysDevExtTramp 81
PhysDevExtTramp 82
PhysDevExtTramp 83
PhysDevExtTramp 84
PhysDevExtTramp 85
PhysDevExtTramp 86
PhysDevExtTramp 87
PhysDevExtTramp 88
PhysDevExtTramp 89
PhysDevExtTramp 90
PhysDevExtTramp 91
PhysDevExtTramp 92
PhysDevExtTramp 93
PhysDevExtTramp 94
PhysDevExtTramp 95
PhysDevExtTramp 96
PhysDevExtTramp 97
PhysDevExtTramp 98
PhysDevExtTramp 99
PhysDevExtTramp 100
PhysDevExtTramp 101
PhysDevExtTramp 102
PhysDevExtTramp 103
PhysDevExtTramp 104
PhysDevExtTramp 105
PhysDevExtTramp 106
PhysDevExtTramp 107
PhysDevExtTramp 108
PhysDevExtTramp 109
PhysDevExtTramp 110
PhysDevExtTramp 111
PhysDevExtTramp 112
PhysDevExtTramp 113
PhysDevExtTramp 114
PhysDevExtTramp 115
PhysDevExtTramp 116
PhysDevExtTramp 117
PhysDevExtTramp 118
PhysDevExtTramp 119
PhysDevExtTramp 120
PhysDevExtTramp 121
PhysDevExtTramp 122
PhysDevExtTramp 123
PhysDevExtTramp 124
PhysDevExtTramp 125
PhysDevExtTramp 126
PhysDevExtTramp 127
PhysDevExtTramp 128
PhysDevExtTramp 129
PhysDevExtTramp 130
PhysDevExtTramp 131
PhysDevExtTramp 132
PhysDevExtTramp 133
PhysDevExtTramp 134
PhysDevExtTramp 135
PhysDevExtTramp 136
PhysDevExtTramp 137
PhysDevExtTramp 138
PhysDevExtTramp 139
PhysDevExtTramp 140
PhysDevExtTramp 141
PhysDevExtTramp 142
PhysDevExtTramp 143
PhysDevExtTramp 144
PhysDevExtTramp 145
PhysDevExtTramp 146
PhysDevExtTramp 147
PhysDevExtTramp 148
PhysDevExtTramp 149
PhysDevExtTramp 150
PhysDevExtTramp 151
PhysDevExtTramp 152
PhysDevExtTramp 153
PhysDevExtTramp 154
PhysDevExtTramp 155
PhysDevExtTramp 156
PhysDevExtTramp 157
PhysDevExtTramp 158
PhysDevExtTramp 159
PhysDevExtTramp 160
PhysDevExtTramp 161
PhysDevExtTramp 162
PhysDevExtTramp 163
PhysDevExtTramp 164
PhysDevExtTramp 165
PhysDevExtTramp 166
PhysDevExtTramp 167
PhysDevExtTramp 168
PhysDevExtTramp 169
PhysDevExtTramp 170
PhysDevExtTramp 171
PhysDevExtTramp 172
PhysDevExtTramp 173
PhysDevExtTramp 174
PhysDevExtTramp 175
PhysDevExtTramp 176
PhysDevExtTramp 177
PhysDevExtTramp 178
PhysDevExtTramp 179
PhysDevExtTramp 180
PhysDevExtTramp 181
PhysDevExtTramp 182
PhysDevExtTramp 183
PhysDevExtTramp 184
PhysDevExtTramp 185
PhysDevExtTramp 186
PhysDevExtTramp 187
PhysDevExtTramp 188
PhysDevExtTramp 189
PhysDevExtTramp 190
PhysDevExtTramp 191
PhysDevExtTramp 192
PhysDevExtTramp 193
PhysDevExtTramp 194
PhysDevExtTramp 195
PhysDevExtTramp 196
PhysDevExtTramp 197
PhysDevExtTramp 198
PhysDevExtTramp 199
PhysDevExtTramp 200
PhysDevExtTramp 201
PhysDevExtTramp 202
PhysDevExtTramp 203
PhysDevExtTramp 204
PhysDevExtTramp 205
PhysDevExtTramp 206
PhysDevExtTramp 207
PhysDevExtTramp 208
PhysDevExtTramp 209
PhysDevExtTramp 210
PhysDevExtTramp 211
PhysDevExtTramp 212
PhysDevExtTramp 213
PhysDevExtTramp 214
PhysDevExtTramp 215
PhysDevExtTramp 216
PhysDevExtTramp 217
PhysDevExtTramp 218
PhysDevExtTramp 219
PhysDevExtTramp 220
PhysDevExtTramp 221
PhysDevExtTramp 222
PhysDevExtTramp 223
PhysDevExtTramp 224
PhysDevExtTramp 225
PhysDevExtTramp 226
PhysDevExtTramp 227
PhysDevExtTramp 228
PhysDevExtTramp 229
PhysDevExtTramp 230
PhysDevExtTramp 231
PhysDevExtTramp 232
PhysDevExtTramp 233
PhysDevExtTramp 234
PhysDevExtTramp 235
PhysDevExtTramp 236
PhysDevExtTramp 237
PhysDevExtTramp 238
PhysDevExtTramp 239
PhysDevExtTramp 240
PhysDevExtTramp 241
PhysDevExtTramp 242
PhysDevExtTramp 243
PhysDevExtTramp 244
PhysDevExtTramp 245
PhysDevExtTramp 246
PhysDevExtTramp 247
PhysDevExtTramp 248
PhysDevExtTramp 249
PhysDevExtTermin 0
PhysDevExtTermin 1
PhysDevExtTermin 2
PhysDevExtTermin 3
PhysDevExtTermin 4
PhysDevExtTermin 5
PhysDevExtTermin 6
PhysDevExtTermin 7
PhysDevExtTermin 8
PhysDevExtTermin 9
PhysDevExtTermin 10
PhysDevExtTermin 11
PhysDevExtTermin 12
PhysDevExtTermin 13
PhysDevExtTermin 14
PhysDevExtTermin 15
PhysDevExtTermin 16
PhysDevExtTermin 17
PhysDevExtTermin 18
PhysDevExtTermin 19
PhysDevExtTermin 20
PhysDevExtTermin 21
PhysDevExtTermin 22
PhysDevExtTermin 23
PhysDevExtTermin 24
PhysDevExtTermin 25
PhysDevExtTermin 26
PhysDevExtTermin 27
PhysDevExtTermin 28
PhysDevExtTermin 29
PhysDevExtTermin 30
PhysDevExtTermin 31
PhysDevExtTermin 32
PhysDevExtTermin 33
PhysDevExtTermin 34
PhysDevExtTermin 35
PhysDevExtTermin 36
PhysDevExtTermin 37
PhysDevExtTermin 38
PhysDevExtTermin 39
PhysDevExtTermin 40
PhysDevExtTermin 41
PhysDevExtTermin 42
PhysDevExtTermin 43
PhysDevExtTermin 44
PhysDevExtTermin 45
PhysDevExtTermin 46
PhysDevExtTermin 47
PhysDevExtTermin 48
PhysDevExtTermin 49
PhysDevExtTermin 50
PhysDevExtTermin 51
PhysDevExtTermin 52
PhysDevExtTermin 53
PhysDevExtTermin 54
PhysDevExtTermin 55
PhysDevExtTermin 56
PhysDevExtTermin 57
PhysDevExtTermin 58
PhysDevExtTermin 59
PhysDevExtTermin 60
PhysDevExtTermin 61
PhysDevExtTermin 62
PhysDevExtTermin 63
PhysDevExtTermin 64
PhysDevExtTermin 65
PhysDevExtTermin 66
PhysDevExtTermin 67
PhysDevExtTermin 68
PhysDevExtTermin 69
PhysDevExtTermin 70
PhysDevExtTermin 71
PhysDevExtTermin 72
PhysDevExtTermin 73
PhysDevExtTermin 74
PhysDevExtTermin 75
PhysDevExtTermin 76
PhysDevExtTermin 77
PhysDevExtTermin 78
PhysDevExtTermin 79
PhysDevExtTermin 80
PhysDevExtTermin 81
PhysDevExtTermin 82
PhysDevExtTermin 83
PhysDevExtTermin 84
PhysDevExtTermin 85
PhysDevExtTermin 86
PhysDevExtTermin 87
PhysDevExtTermin 88
PhysDevExtTermin 89
PhysDevExtTermin 90
PhysDevExtTermin 91
PhysDevExtTermin 92
PhysDevExtTermin 93
PhysDevExtTermin 94
PhysDevExtTermin 95
PhysDevExtTermin 96
PhysDevExtTermin 97
PhysDevExtTermin 98
PhysDevExtTermin 99
PhysDevExtTermin 100
PhysDevExtTermin 101
PhysDevExtTermin 102
PhysDevExtTermin 103
PhysDevExtTermin 104
PhysDevExtTermin 105
PhysDevExtTermin 106
PhysDevExtTermin 107
PhysDevExtTermin 108
PhysDevExtTermin 109
PhysDevExtTermin 110
PhysDevExtTermin 111
PhysDevExtTermin 112
PhysDevExtTermin 113
PhysDevExtTermin 114
PhysDevExtTermin 115
PhysDevExtTermin 116
PhysDevExtTermin 117
PhysDevExtTermin 118
PhysDevExtTermin 119
PhysDevExtTermin 120
PhysDevExtTermin 121
PhysDevExtTermin 122
PhysDevExtTermin 123
PhysDevExtTermin 124
PhysDevExtTermin 125
PhysDevExtTermin 126
PhysDevExtTermin 127
PhysDevExtTermin 128
PhysDevExtTermin 129
PhysDevExtTermin 130
PhysDevExtTermin 131
PhysDevExtTermin 132
PhysDevExtTermin 133
PhysDevExtTermin 134
PhysDevExtTermin 135
PhysDevExtTermin 136
PhysDevExtTermin 137
PhysDevExtTermin 138
PhysDevExtTermin 139
PhysDevExtTermin 140
PhysDevExtTermin 141
PhysDevExtTermin 142
PhysDevExtTermin 143
PhysDevExtTermin 144
PhysDevExtTermin 145
PhysDevExtTermin 146
PhysDevExtTermin 147
PhysDevExtTermin 148
PhysDevExtTermin 149
PhysDevExtTermin 150
PhysDevExtTermin 151
PhysDevExtTermin 152
PhysDevExtTermin 153
PhysDevExtTermin 154
PhysDevExtTermin 155
PhysDevExtTermin 156
PhysDevExtTermin 157
PhysDevExtTermin 158
PhysDevExtTermin 159
PhysDevExtTermin 160
PhysDevExtTermin 161
PhysDevExtTermin 162
PhysDevExtTermin 163
PhysDevExtTermin 164
PhysDevExtTermin 165
PhysDevExtTermin 166
PhysDevExtTermin 167
PhysDevExtTermin 168
PhysDevExtTermin 169
PhysDevExtTermin 170
PhysDevExtTermin 171
PhysDevExtTermin 172
PhysDevExtTermin 173
PhysDevExtTermin 174
PhysDevExtTermin 175
PhysDevExtTermin 176
PhysDevExtTermin 177
PhysDevExtTermin 178
PhysDevExtTermin 179
PhysDevExtTermin 180
PhysDevExtTermin 181
PhysDevExtTermin 182
PhysDevExtTermin 183
PhysDevExtTermin 184
PhysDevExtTermin 185
PhysDevExtTermin 186
PhysDevExtTermin 187
PhysDevExtTermin 188
PhysDevExtTermin 189
PhysDevExtTermin 190
PhysDevExtTermin 191
PhysDevExtTermin 192
PhysDevExtTermin 193
PhysDevExtTermin 194
PhysDevExtTermin 195
PhysDevExtTermin 196
PhysDevExtTermin 197
PhysDevExtTermin 198
PhysDevExtTermin 199
PhysDevExtTermin 200
PhysDevExtTermin 201
PhysDevExtTermin 202
PhysDevExtTermin 203
PhysDevExtTermin 204
PhysDevExtTermin 205
PhysDevExtTermin 206
PhysDevExtTermin 207
PhysDevExtTermin 208
PhysDevExtTermin 209
PhysDevExtTermin 210
PhysDevExtTermin 211
PhysDevExtTermin 212
PhysDevExtTermin 213
PhysDevExtTermin 214
PhysDevExtTermin 215
PhysDevExtTermin 216
PhysDevExtTermin 217
PhysDevExtTermin 218
PhysDevExtTermin 219
PhysDevExtTermin 220
PhysDevExtTermin 221
PhysDevExtTermin 222
PhysDevExtTermin 223
PhysDevExtTermin 224
PhysDevExtTermin 225
PhysDevExtTermin 226
PhysDevExtTermin 227
PhysDevExtTermin 228
PhysDevExtTermin 229
PhysDevExtTermin 230
PhysDevExtTermin 231
PhysDevExtTermin 232
PhysDevExtTermin 233
PhysDevExtTermin 234
PhysDevExtTermin 235
PhysDevExtTermin 236
PhysDevExtTermin 237
PhysDevExtTermin 238
PhysDevExtTermin 239
PhysDevExtTermin 240
PhysDevExtTermin 241
PhysDevExtTermin 242
PhysDevExtTermin 243
PhysDevExtTermin 244
PhysDevExtTermin 245
PhysDevExtTermin 246
PhysDevExtTermin 247
PhysDevExtTermin 248
PhysDevExtTermin 249
DevExtTramp 0
DevExtTramp 1
DevExtTramp 2
DevExtTramp 3
DevExtTramp 4
DevExtTramp 5
DevExtTramp 6
DevExtTramp 7
DevExtTramp 8
DevExtTramp 9
DevExtTramp 10
DevExtTramp 11
DevExtTramp 12
DevExtTramp 13
DevExtTramp 14
DevExtTramp 15
DevExtTramp 16
DevExtTramp 17
DevExtTramp 18
DevExtTramp 19
DevExtTramp 20
DevExtTramp 21
DevExtTramp 22
DevExtTramp 23
DevExtTramp 24
DevExtTramp 25
DevExtTramp 26
DevExtTramp 27
DevExtTramp 28
DevExtTramp 29
DevExtTramp 30
DevExtTramp 31
DevExtTramp 32
DevExtTramp 33
DevExtTramp 34
DevExtTramp 35
DevExtTramp 36
DevExtTramp 37
DevExtTramp 38
DevExtTramp 39
DevExtTramp 40
DevExtTramp 41
DevExtTramp 42
DevExtTramp 43
DevExtTramp 44
DevExtTramp 45
DevExtTramp 46
DevExtTramp 47
DevExtTramp 48
DevExtTramp 49
DevExtTramp 50
DevExtTramp 51
DevExtTramp 52
DevExtTramp 53
DevExtTramp 54
DevExtTramp 55
DevExtTramp 56
DevExtTramp 57
DevExtTramp 58
DevExtTramp 59
DevExtTramp 60
DevExtTramp 61
DevExtTramp 62
DevExtTramp 63
DevExtTramp 64
DevExtTramp 65
DevExtTramp 66
DevExtTramp 67
DevExtTramp 68
DevExtTramp 69
DevExtTramp 70
DevExtTramp 71
DevExtTramp 72
DevExtTramp 73
DevExtTramp 74
DevExtTramp 75
DevExtTramp 76
DevExtTramp 77
DevExtTramp 78
DevExtTramp 79
DevExtTramp 80
DevExtTramp 81
DevExtTramp 82
DevExtTramp 83
DevExtTramp 84
DevExtTramp 85
DevExtTramp 86
DevExtTramp 87
DevExtTramp 88
DevExtTramp 89
DevExtTramp 90
DevExtTramp 91
DevExtTramp 92
DevExtTramp 93
DevExtTramp 94
DevExtTramp 95
DevExtTramp 96
DevExtTramp 97
DevExtTramp 98
DevExtTramp 99
DevExtTramp 100
DevExtTramp 101
DevExtTramp 102
DevExtTramp 103
DevExtTramp 104
DevExtTramp 105
DevExtTramp 106
DevExtTramp 107
DevExtTramp 108
DevExtTramp 109
DevExtTramp 110
DevExtTramp 111
DevExtTramp 112
DevExtTramp 113
DevExtTramp 114
DevExtTramp 115
DevExtTramp 116
DevExtTramp 117
DevExtTramp 118
DevExtTramp 119
DevExtTramp 120
DevExtTramp 121
DevExtTramp 122
DevExtTramp 123
DevExtTramp 124
DevExtTramp 125
DevExtTramp 126
DevExtTramp 127
DevExtTramp 128
DevExtTramp 129
DevExtTramp 130
DevExtTramp 131
DevExtTramp 132
DevExtTramp 133
DevExtTramp 134
DevExtTramp 135
DevExtTramp 136
DevExtTramp 137
DevExtTramp 138
DevExtTramp 139
DevExtTramp 140
DevExtTramp 141
DevExtTramp 142
DevExtTramp 143
DevExtTramp 144
DevExtTramp 145
DevExtTramp 146
DevExtTramp 147
DevExtTramp 148
DevExtTramp 149
DevExtTramp 150
DevExtTramp 151
DevExtTramp 152
DevExtTramp 153
DevExtTramp 154
DevExtTramp 155
DevExtTramp 156
DevExtTramp 157
DevExtTramp 158
DevExtTramp 159
DevExtTramp 160
DevExtTramp 161
DevExtTramp 162
DevExtTramp 163
DevExtTramp 164
DevExtTramp 165
DevExtTramp 166
DevExtTramp 167
DevExtTramp 168
DevExtTramp 169
DevExtTramp 170
DevExtTramp 171
DevExtTramp 172
DevExtTramp 173
DevExtTramp 174
DevExtTramp 175
DevExtTramp 176
DevExtTramp 177
DevExtTramp 178
DevExtTramp 179
DevExtTramp 180
DevExtTramp 181
DevExtTramp 182
DevExtTramp 183
DevExtTramp 184
DevExtTramp 185
DevExtTramp 186
DevExtTramp 187
DevExtTramp 188
DevExtTramp 189
DevExtTramp 190
DevExtTramp 191
DevExtTramp 192
DevExtTramp 193
DevExtTramp 194
DevExtTramp 195
DevExtTramp 196
DevExtTramp 197
DevExtTramp 198
DevExtTramp 199
DevExtTramp 200
DevExtTramp 201
DevExtTramp 202
DevExtTramp 203
DevExtTramp 204
DevExtTramp 205
DevExtTramp 206
DevExtTramp 207
DevExtTramp 208
DevExtTramp 209
DevExtTramp 210
DevExtTramp 211
DevExtTramp 212
DevExtTramp 213
DevExtTramp 214
DevExtTramp 215
DevExtTramp 216
DevExtTramp 217
DevExtTramp 218
DevExtTramp 219
DevExtTramp 220
DevExtTramp 221
DevExtTramp 222
DevExtTramp 223
DevExtTramp 224
DevExtTramp 225
DevExtTramp 226
DevExtTramp 227
DevExtTramp 228
DevExtTramp 229
DevExtTramp 230
DevExtTramp 231
DevExtTramp 232
DevExtTramp 233
DevExtTramp 234
DevExtTramp 235
DevExtTramp 236
DevExtTramp 237
DevExtTramp 238
DevExtTramp 239
DevExtTramp 240
DevExtTramp 241
DevExtTramp 242
DevExtTramp 243
DevExtTramp 244
DevExtTramp 245
DevExtTramp 246
DevExtTramp 247
DevExtTramp 248
DevExtTramp 249
end

View File

@ -215,6 +215,9 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t
LOOKUP_GIPA(CreateMacOSSurfaceMVK, false);
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
LOOKUP_GIPA(GetPhysicalDeviceMultisamplePropertiesEXT, false);
#undef LOOKUP_GIPA
return true;
@ -421,6 +424,14 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)gpa(dev, "vkGetBufferMemoryRequirements2KHR");
table->GetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)gpa(dev, "vkGetImageSparseMemoryRequirements2KHR");
// ---- VK_KHR_sampler_ycbcr_conversion extension commands
table->CreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)gpa(dev, "vkCreateSamplerYcbcrConversionKHR");
table->DestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)gpa(dev, "vkDestroySamplerYcbcrConversionKHR");
// ---- VK_KHR_bind_memory2 extension commands
table->BindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)gpa(dev, "vkBindBufferMemory2KHR");
table->BindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)gpa(dev, "vkBindImageMemory2KHR");
// ---- VK_EXT_debug_marker extension commands
table->DebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)gpa(dev, "vkDebugMarkerSetObjectTagEXT");
table->DebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)gpa(dev, "vkDebugMarkerSetObjectNameEXT");
@ -432,6 +443,9 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
table->CmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)gpa(dev, "vkCmdDrawIndirectCountAMD");
table->CmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)gpa(dev, "vkCmdDrawIndexedIndirectCountAMD");
// ---- VK_AMD_shader_info extension commands
table->GetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)gpa(dev, "vkGetShaderInfoAMD");
// ---- VK_NV_external_memory_win32 extension commands
#ifdef VK_USE_PLATFORM_WIN32_KHR
table->GetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)gpa(dev, "vkGetMemoryWin32HandleNV");
@ -439,13 +453,11 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
// ---- VK_KHX_device_group extension commands
table->GetDeviceGroupPeerMemoryFeaturesKHX = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)gpa(dev, "vkGetDeviceGroupPeerMemoryFeaturesKHX");
table->BindBufferMemory2KHX = (PFN_vkBindBufferMemory2KHX)gpa(dev, "vkBindBufferMemory2KHX");
table->BindImageMemory2KHX = (PFN_vkBindImageMemory2KHX)gpa(dev, "vkBindImageMemory2KHX");
table->CmdSetDeviceMaskKHX = (PFN_vkCmdSetDeviceMaskKHX)gpa(dev, "vkCmdSetDeviceMaskKHX");
table->CmdDispatchBaseKHX = (PFN_vkCmdDispatchBaseKHX)gpa(dev, "vkCmdDispatchBaseKHX");
table->GetDeviceGroupPresentCapabilitiesKHX = (PFN_vkGetDeviceGroupPresentCapabilitiesKHX)gpa(dev, "vkGetDeviceGroupPresentCapabilitiesKHX");
table->GetDeviceGroupSurfacePresentModesKHX = (PFN_vkGetDeviceGroupSurfacePresentModesKHX)gpa(dev, "vkGetDeviceGroupSurfacePresentModesKHX");
table->AcquireNextImage2KHX = (PFN_vkAcquireNextImage2KHX)gpa(dev, "vkAcquireNextImage2KHX");
table->CmdDispatchBaseKHX = (PFN_vkCmdDispatchBaseKHX)gpa(dev, "vkCmdDispatchBaseKHX");
// ---- VK_NVX_device_generated_commands extension commands
table->CmdProcessCommandsNVX = (PFN_vkCmdProcessCommandsNVX)gpa(dev, "vkCmdProcessCommandsNVX");
@ -475,6 +487,18 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
// ---- VK_EXT_hdr_metadata extension commands
table->SetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)gpa(dev, "vkSetHdrMetadataEXT");
// ---- VK_EXT_sample_locations extension commands
table->CmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)gpa(dev, "vkCmdSetSampleLocationsEXT");
// ---- VK_EXT_validation_cache extension commands
table->CreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)gpa(dev, "vkCreateValidationCacheEXT");
table->DestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)gpa(dev, "vkDestroyValidationCacheEXT");
table->MergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)gpa(dev, "vkMergeValidationCachesEXT");
table->GetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)gpa(dev, "vkGetValidationCacheDataEXT");
// ---- VK_EXT_external_memory_host extension commands
table->GetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)gpa(dev, "vkGetMemoryHostPointerPropertiesEXT");
}
// Init Instance function pointer dispatch table with core commands
@ -628,6 +652,9 @@ VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayer
#ifdef VK_USE_PLATFORM_MACOS_MVK
table->CreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK)gpa(inst, "vkCreateMacOSSurfaceMVK");
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
table->GetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)gpa(inst, "vkGetPhysicalDeviceMultisamplePropertiesEXT");
}
// Device command lookup function
@ -825,6 +852,14 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
if (!strcmp(name, "GetBufferMemoryRequirements2KHR")) return (void *)table->GetBufferMemoryRequirements2KHR;
if (!strcmp(name, "GetImageSparseMemoryRequirements2KHR")) return (void *)table->GetImageSparseMemoryRequirements2KHR;
// ---- VK_KHR_sampler_ycbcr_conversion extension commands
if (!strcmp(name, "CreateSamplerYcbcrConversionKHR")) return (void *)table->CreateSamplerYcbcrConversionKHR;
if (!strcmp(name, "DestroySamplerYcbcrConversionKHR")) return (void *)table->DestroySamplerYcbcrConversionKHR;
// ---- VK_KHR_bind_memory2 extension commands
if (!strcmp(name, "BindBufferMemory2KHR")) return (void *)table->BindBufferMemory2KHR;
if (!strcmp(name, "BindImageMemory2KHR")) return (void *)table->BindImageMemory2KHR;
// ---- VK_EXT_debug_marker extension commands
if (!strcmp(name, "DebugMarkerSetObjectTagEXT")) return (void *)table->DebugMarkerSetObjectTagEXT;
if (!strcmp(name, "DebugMarkerSetObjectNameEXT")) return (void *)table->DebugMarkerSetObjectNameEXT;
@ -836,6 +871,9 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
if (!strcmp(name, "CmdDrawIndirectCountAMD")) return (void *)table->CmdDrawIndirectCountAMD;
if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD")) return (void *)table->CmdDrawIndexedIndirectCountAMD;
// ---- VK_AMD_shader_info extension commands
if (!strcmp(name, "GetShaderInfoAMD")) return (void *)table->GetShaderInfoAMD;
// ---- VK_NV_external_memory_win32 extension commands
#ifdef VK_USE_PLATFORM_WIN32_KHR
if (!strcmp(name, "GetMemoryWin32HandleNV")) return (void *)table->GetMemoryWin32HandleNV;
@ -843,13 +881,11 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
// ---- VK_KHX_device_group extension commands
if (!strcmp(name, "GetDeviceGroupPeerMemoryFeaturesKHX")) return (void *)table->GetDeviceGroupPeerMemoryFeaturesKHX;
if (!strcmp(name, "BindBufferMemory2KHX")) return (void *)table->BindBufferMemory2KHX;
if (!strcmp(name, "BindImageMemory2KHX")) return (void *)table->BindImageMemory2KHX;
if (!strcmp(name, "CmdSetDeviceMaskKHX")) return (void *)table->CmdSetDeviceMaskKHX;
if (!strcmp(name, "CmdDispatchBaseKHX")) return (void *)table->CmdDispatchBaseKHX;
if (!strcmp(name, "GetDeviceGroupPresentCapabilitiesKHX")) return (void *)table->GetDeviceGroupPresentCapabilitiesKHX;
if (!strcmp(name, "GetDeviceGroupSurfacePresentModesKHX")) return (void *)table->GetDeviceGroupSurfacePresentModesKHX;
if (!strcmp(name, "AcquireNextImage2KHX")) return (void *)table->AcquireNextImage2KHX;
if (!strcmp(name, "CmdDispatchBaseKHX")) return (void *)table->CmdDispatchBaseKHX;
// ---- VK_NVX_device_generated_commands extension commands
if (!strcmp(name, "CmdProcessCommandsNVX")) return (void *)table->CmdProcessCommandsNVX;
@ -880,6 +916,18 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
// ---- VK_EXT_hdr_metadata extension commands
if (!strcmp(name, "SetHdrMetadataEXT")) return (void *)table->SetHdrMetadataEXT;
// ---- VK_EXT_sample_locations extension commands
if (!strcmp(name, "CmdSetSampleLocationsEXT")) return (void *)table->CmdSetSampleLocationsEXT;
// ---- VK_EXT_validation_cache extension commands
if (!strcmp(name, "CreateValidationCacheEXT")) return (void *)table->CreateValidationCacheEXT;
if (!strcmp(name, "DestroyValidationCacheEXT")) return (void *)table->DestroyValidationCacheEXT;
if (!strcmp(name, "MergeValidationCachesEXT")) return (void *)table->MergeValidationCachesEXT;
if (!strcmp(name, "GetValidationCacheDataEXT")) return (void *)table->GetValidationCacheDataEXT;
// ---- VK_EXT_external_memory_host extension commands
if (!strcmp(name, "GetMemoryHostPointerPropertiesEXT")) return (void *)table->GetMemoryHostPointerPropertiesEXT;
return NULL;
}
@ -1037,6 +1085,9 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerI
if (!strcmp(name, "CreateMacOSSurfaceMVK")) return (void *)table->CreateMacOSSurfaceMVK;
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
if (!strcmp(name, "GetPhysicalDeviceMultisamplePropertiesEXT")) return (void *)table->GetPhysicalDeviceMultisamplePropertiesEXT;
*found_name = false;
return NULL;
}
@ -1268,11 +1319,50 @@ VKAPI_ATTR void VKAPI_CALL GetImageSparseMemoryRequirements2KHR(
}
// ---- VK_KHR_sampler_ycbcr_conversion extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL CreateSamplerYcbcrConversionKHR(
VkDevice device,
const VkSamplerYcbcrConversionCreateInfoKHR* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkSamplerYcbcrConversionKHR* pYcbcrConversion) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->CreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion);
}
VKAPI_ATTR void VKAPI_CALL DestroySamplerYcbcrConversionKHR(
VkDevice device,
VkSamplerYcbcrConversionKHR ycbcrConversion,
const VkAllocationCallbacks* pAllocator) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
disp->DestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
}
// ---- VK_KHR_bind_memory2 extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHR(
VkDevice device,
uint32_t bindInfoCount,
const VkBindBufferMemoryInfoKHR* pBindInfos) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->BindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
}
VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHR(
VkDevice device,
uint32_t bindInfoCount,
const VkBindImageMemoryInfoKHR* pBindInfos) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->BindImageMemory2KHR(device, bindInfoCount, pBindInfos);
}
// ---- VK_EXT_debug_marker extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
VkDevice device,
VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
VkDebugMarkerObjectTagInfoEXT local_tag_info;
memcpy(&local_tag_info, pTagInfo, sizeof(VkDebugMarkerObjectTagInfoEXT));
@ -1286,7 +1376,7 @@ VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectTagEXT(
VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectTagEXT(
VkDevice device,
VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
const VkDebugMarkerObjectTagInfoEXT* pTagInfo) {
uint32_t icd_index = 0;
struct loader_device *dev;
struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
@ -1314,7 +1404,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectTagEXT(
VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
VkDevice device,
VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
VkDebugMarkerObjectNameInfoEXT local_name_info;
memcpy(&local_name_info, pNameInfo, sizeof(VkDebugMarkerObjectNameInfoEXT));
@ -1328,7 +1418,7 @@ VKAPI_ATTR VkResult VKAPI_CALL DebugMarkerSetObjectNameEXT(
VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectNameEXT(
VkDevice device,
VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
const VkDebugMarkerObjectNameInfoEXT* pNameInfo) {
uint32_t icd_index = 0;
struct loader_device *dev;
struct loader_icd_term *icd_term = loader_get_icd_and_device(device, &dev, &icd_index);
@ -1356,7 +1446,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_DebugMarkerSetObjectNameEXT(
VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerBeginEXT(
VkCommandBuffer commandBuffer,
VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo);
}
@ -1369,7 +1459,7 @@ VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerEndEXT(
VKAPI_ATTR void VKAPI_CALL CmdDebugMarkerInsertEXT(
VkCommandBuffer commandBuffer,
VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo);
}
@ -1402,6 +1492,20 @@ VKAPI_ATTR void VKAPI_CALL CmdDrawIndexedIndirectCountAMD(
}
// ---- VK_AMD_shader_info extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL GetShaderInfoAMD(
VkDevice device,
VkPipeline pipeline,
VkShaderStageFlagBits shaderStage,
VkShaderInfoTypeAMD infoType,
size_t* pInfoSize,
void* pInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->GetShaderInfoAMD(device, pipeline, shaderStage, infoType, pInfoSize, pInfo);
}
// ---- VK_NV_external_memory_win32 extension trampoline/terminators
#ifdef VK_USE_PLATFORM_WIN32_KHR
@ -1428,22 +1532,6 @@ VKAPI_ATTR void VKAPI_CALL GetDeviceGroupPeerMemoryFeaturesKHX(
disp->GetDeviceGroupPeerMemoryFeaturesKHX(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
}
VKAPI_ATTR VkResult VKAPI_CALL BindBufferMemory2KHX(
VkDevice device,
uint32_t bindInfoCount,
const VkBindBufferMemoryInfoKHX* pBindInfos) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->BindBufferMemory2KHX(device, bindInfoCount, pBindInfos);
}
VKAPI_ATTR VkResult VKAPI_CALL BindImageMemory2KHX(
VkDevice device,
uint32_t bindInfoCount,
const VkBindImageMemoryInfoKHX* pBindInfos) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->BindImageMemory2KHX(device, bindInfoCount, pBindInfos);
}
VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHX(
VkCommandBuffer commandBuffer,
uint32_t deviceMask) {
@ -1451,6 +1539,18 @@ VKAPI_ATTR void VKAPI_CALL CmdSetDeviceMaskKHX(
disp->CmdSetDeviceMaskKHX(commandBuffer, deviceMask);
}
VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHX(
VkCommandBuffer commandBuffer,
uint32_t baseGroupX,
uint32_t baseGroupY,
uint32_t baseGroupZ,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdDispatchBaseKHX(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
VKAPI_ATTR VkResult VKAPI_CALL GetDeviceGroupPresentCapabilitiesKHX(
VkDevice device,
VkDeviceGroupPresentCapabilitiesKHX* pDeviceGroupPresentCapabilities) {
@ -1483,26 +1583,6 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModesKHX(
return VK_SUCCESS;
}
VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHX(
VkDevice device,
const VkAcquireNextImageInfoKHX* pAcquireInfo,
uint32_t* pImageIndex) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->AcquireNextImage2KHX(device, pAcquireInfo, pImageIndex);
}
VKAPI_ATTR void VKAPI_CALL CmdDispatchBaseKHX(
VkCommandBuffer commandBuffer,
uint32_t baseGroupX,
uint32_t baseGroupY,
uint32_t baseGroupZ,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdDispatchBaseKHX(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDevicePresentRectanglesKHX(
VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
@ -1533,6 +1613,14 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDevicePresentRectanglesKHX(
return icd_term->dispatch.GetPhysicalDevicePresentRectanglesKHX(phys_dev_term->phys_dev, surface, pRectCount, pRects);
}
VKAPI_ATTR VkResult VKAPI_CALL AcquireNextImage2KHX(
VkDevice device,
const VkAcquireNextImageInfoKHX* pAcquireInfo,
uint32_t* pImageIndex) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->AcquireNextImage2KHX(device, pAcquireInfo, pImageIndex);
}
// ---- VK_NN_vi_surface extension trampoline/terminators
@ -1788,6 +1876,89 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateMacOSSurfaceMVK(
}
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension trampoline/terminators
VKAPI_ATTR void VKAPI_CALL CmdSetSampleLocationsEXT(
VkCommandBuffer commandBuffer,
const VkSampleLocationsInfoEXT* pSampleLocationsInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo);
}
VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceMultisamplePropertiesEXT(
VkPhysicalDevice physicalDevice,
VkSampleCountFlagBits samples,
VkMultisamplePropertiesEXT* pMultisampleProperties) {
const VkLayerInstanceDispatchTable *disp;
VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
disp = loader_get_instance_layer_dispatch(physicalDevice);
disp->GetPhysicalDeviceMultisamplePropertiesEXT(unwrapped_phys_dev, samples, pMultisampleProperties);
}
VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceMultisamplePropertiesEXT(
VkPhysicalDevice physicalDevice,
VkSampleCountFlagBits samples,
VkMultisamplePropertiesEXT* pMultisampleProperties) {
struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
if (NULL == icd_term->dispatch.GetPhysicalDeviceMultisamplePropertiesEXT) {
loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
"ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceMultisamplePropertiesEXT");
}
icd_term->dispatch.GetPhysicalDeviceMultisamplePropertiesEXT(phys_dev_term->phys_dev, samples, pMultisampleProperties);
}
// ---- VK_EXT_validation_cache extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL CreateValidationCacheEXT(
VkDevice device,
const VkValidationCacheCreateInfoEXT* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkValidationCacheEXT* pValidationCache) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->CreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache);
}
VKAPI_ATTR void VKAPI_CALL DestroyValidationCacheEXT(
VkDevice device,
VkValidationCacheEXT validationCache,
const VkAllocationCallbacks* pAllocator) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
disp->DestroyValidationCacheEXT(device, validationCache, pAllocator);
}
VKAPI_ATTR VkResult VKAPI_CALL MergeValidationCachesEXT(
VkDevice device,
VkValidationCacheEXT dstCache,
uint32_t srcCacheCount,
const VkValidationCacheEXT* pSrcCaches) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->MergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches);
}
VKAPI_ATTR VkResult VKAPI_CALL GetValidationCacheDataEXT(
VkDevice device,
VkValidationCacheEXT validationCache,
size_t* pDataSize,
void* pData) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->GetValidationCacheDataEXT(device, validationCache, pDataSize, pData);
}
// ---- VK_EXT_external_memory_host extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL GetMemoryHostPointerPropertiesEXT(
VkDevice device,
VkExternalMemoryHandleTypeFlagBitsKHR handleType,
const void* pHostPointer,
VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->GetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties);
}
// GPA helpers for extensions
bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *name, void **addr) {
*addr = NULL;
@ -1997,6 +2168,26 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
// ---- VK_KHR_sampler_ycbcr_conversion extension commands
if (!strcmp("vkCreateSamplerYcbcrConversionKHR", name)) {
*addr = (void *)CreateSamplerYcbcrConversionKHR;
return true;
}
if (!strcmp("vkDestroySamplerYcbcrConversionKHR", name)) {
*addr = (void *)DestroySamplerYcbcrConversionKHR;
return true;
}
// ---- VK_KHR_bind_memory2 extension commands
if (!strcmp("vkBindBufferMemory2KHR", name)) {
*addr = (void *)BindBufferMemory2KHR;
return true;
}
if (!strcmp("vkBindImageMemory2KHR", name)) {
*addr = (void *)BindImageMemory2KHR;
return true;
}
// ---- VK_EXT_debug_marker extension commands
if (!strcmp("vkDebugMarkerSetObjectTagEXT", name)) {
*addr = (void *)DebugMarkerSetObjectTagEXT;
@ -2029,6 +2220,12 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
// ---- VK_AMD_shader_info extension commands
if (!strcmp("vkGetShaderInfoAMD", name)) {
*addr = (void *)GetShaderInfoAMD;
return true;
}
// ---- VK_NV_external_memory_capabilities extension commands
if (!strcmp("vkGetPhysicalDeviceExternalImageFormatPropertiesNV", name)) {
*addr = (ptr_instance->enabled_known_extensions.nv_external_memory_capabilities == 1)
@ -2050,18 +2247,14 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
*addr = (void *)GetDeviceGroupPeerMemoryFeaturesKHX;
return true;
}
if (!strcmp("vkBindBufferMemory2KHX", name)) {
*addr = (void *)BindBufferMemory2KHX;
return true;
}
if (!strcmp("vkBindImageMemory2KHX", name)) {
*addr = (void *)BindImageMemory2KHX;
return true;
}
if (!strcmp("vkCmdSetDeviceMaskKHX", name)) {
*addr = (void *)CmdSetDeviceMaskKHX;
return true;
}
if (!strcmp("vkCmdDispatchBaseKHX", name)) {
*addr = (void *)CmdDispatchBaseKHX;
return true;
}
if (!strcmp("vkGetDeviceGroupPresentCapabilitiesKHX", name)) {
*addr = (void *)GetDeviceGroupPresentCapabilitiesKHX;
return true;
@ -2070,18 +2263,14 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
*addr = (void *)GetDeviceGroupSurfacePresentModesKHX;
return true;
}
if (!strcmp("vkAcquireNextImage2KHX", name)) {
*addr = (void *)AcquireNextImage2KHX;
return true;
}
if (!strcmp("vkCmdDispatchBaseKHX", name)) {
*addr = (void *)CmdDispatchBaseKHX;
return true;
}
if (!strcmp("vkGetPhysicalDevicePresentRectanglesKHX", name)) {
*addr = (void *)GetPhysicalDevicePresentRectanglesKHX;
return true;
}
if (!strcmp("vkAcquireNextImage2KHX", name)) {
*addr = (void *)AcquireNextImage2KHX;
return true;
}
// ---- VK_NN_vi_surface extension commands
#ifdef VK_USE_PLATFORM_VI_NN
@ -2238,6 +2427,40 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
if (!strcmp("vkCmdSetSampleLocationsEXT", name)) {
*addr = (void *)CmdSetSampleLocationsEXT;
return true;
}
if (!strcmp("vkGetPhysicalDeviceMultisamplePropertiesEXT", name)) {
*addr = (void *)GetPhysicalDeviceMultisamplePropertiesEXT;
return true;
}
// ---- VK_EXT_validation_cache extension commands
if (!strcmp("vkCreateValidationCacheEXT", name)) {
*addr = (void *)CreateValidationCacheEXT;
return true;
}
if (!strcmp("vkDestroyValidationCacheEXT", name)) {
*addr = (void *)DestroyValidationCacheEXT;
return true;
}
if (!strcmp("vkMergeValidationCachesEXT", name)) {
*addr = (void *)MergeValidationCachesEXT;
return true;
}
if (!strcmp("vkGetValidationCacheDataEXT", name)) {
*addr = (void *)GetValidationCacheDataEXT;
return true;
}
// ---- VK_EXT_external_memory_host extension commands
if (!strcmp("vkGetMemoryHostPointerPropertiesEXT", name)) {
*addr = (void *)GetMemoryHostPointerPropertiesEXT;
return true;
}
return false;
}
@ -2482,6 +2705,9 @@ const VkLayerInstanceDispatchTable instance_disp = {
#ifdef VK_USE_PLATFORM_MACOS_MVK
.CreateMacOSSurfaceMVK = terminator_CreateMacOSSurfaceMVK,
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
.GetPhysicalDeviceMultisamplePropertiesEXT = terminator_GetPhysicalDeviceMultisamplePropertiesEXT,
};
// A null-terminated list of all of the instance extensions supported by the loader.

View File

@ -128,6 +128,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateDevice(
const VkAllocationCallbacks* pAllocator,
VkDevice* pDevice);
VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceExtensionProperties(
const VkEnumerateInstanceExtensionPropertiesChain* chain,
const char* pLayerName,
uint32_t* pPropertyCount,
VkExtensionProperties* pProperties);
@ -137,6 +138,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceExtensionProperties(
uint32_t* pPropertyCount,
VkExtensionProperties* pProperties);
VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateInstanceLayerProperties(
const VkEnumerateInstanceLayerPropertiesChain* chain,
uint32_t* pPropertyCount,
VkLayerProperties* pProperties);
VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumerateDeviceLayerProperties(
@ -312,6 +314,9 @@ struct loader_icd_term_dispatch {
#ifdef VK_USE_PLATFORM_MACOS_MVK
PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
};
union loader_instance_extension_enables {

View File

@ -1,8 +1,8 @@
/*
*
* Copyright (c) 2015-2016 The Khronos Group Inc.
* Copyright (c) 2015-2016 Valve Corporation
* Copyright (c) 2015-2016 LunarG, Inc.
* Copyright (c) 2015-2018 The Khronos Group Inc.
* Copyright (c) 2015-2018 Valve Corporation
* Copyright (c) 2015-2018 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -18,6 +18,7 @@
*
* Author: Ian Elliot <ian@lunarg.com>
* Author: Jon Ashburn <jon@lunarg.com>
* Author: Lenny Komow <lenny@lunarg.com>
*
*/
#pragma once
@ -70,6 +71,7 @@
#define LAYERS_SOURCE_PATH NULL
#endif
#define LAYERS_PATH_ENV "VK_LAYER_PATH"
#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
#define RELATIVE_VK_DRIVERS_INFO VULKAN_DIR VULKAN_ICDCONF_DIR
#define RELATIVE_VK_ELAYERS_INFO VULKAN_DIR VULKAN_ELAYERCONF_DIR
@ -116,13 +118,11 @@ static inline const char *loader_platform_get_proc_address_error(const char *nam
// Threads:
typedef pthread_t loader_platform_thread;
#define THREAD_LOCAL_DECL __thread
#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) pthread_once_t var = PTHREAD_ONCE_INIT;
#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) pthread_once_t var;
static inline void loader_platform_thread_once(pthread_once_t *ctl, void (*func)(void)) {
assert(func != NULL);
assert(ctl != NULL);
pthread_once(ctl, func);
}
// The once init functionality is not used on Linux
#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
// Thread IDs:
typedef pthread_t loader_platform_thread_id;
@ -182,11 +182,38 @@ static inline void loader_platform_thread_cond_broadcast(loader_platform_thread_
#define LAYERS_SOURCE_PATH NULL
#endif
#define LAYERS_PATH_ENV "VK_LAYER_PATH"
#define ENABLED_LAYERS_ENV "VK_INSTANCE_LAYERS"
#define RELATIVE_VK_DRIVERS_INFO ""
#define RELATIVE_VK_ELAYERS_INFO ""
#define RELATIVE_VK_ILAYERS_INFO ""
#define PRINTF_SIZE_T_SPECIFIER "%Iu"
#if defined(_WIN32)
// Get the key for the plug n play driver registry
// The string returned by this function should NOT be freed
static inline const char *LoaderPnpDriverRegistry() {
BOOL is_wow;
IsWow64Process(GetCurrentProcess(), &is_wow);
return is_wow ? (API_NAME "DriverNameWow") : (API_NAME "DriverName");
}
// Get the key for the plug 'n play explicit layer registry
// The string returned by this function should NOT be freed
static inline const char *LoaderPnpELayerRegistry() {
BOOL is_wow;
IsWow64Process(GetCurrentProcess(), &is_wow);
return is_wow ? (API_NAME "ExplicitLayersWow") : (API_NAME "ExplicitLayers");
}
// Get the key for the plug 'n play implicit layer registry
// The string returned by this function should NOT be freed
static inline const char *LoaderPnpILayerRegistry() {
BOOL is_wow;
IsWow64Process(GetCurrentProcess(), &is_wow);
return is_wow ? (API_NAME "ImplicitLayersWow") : (API_NAME "ImplicitLayers");
}
#endif
// File IO
static bool loader_platform_file_exists(const char *path) {
if ((_access(path, 0)) == -1)
@ -254,7 +281,7 @@ static loader_platform_dl_handle loader_platform_open_library(const char *lib_pa
}
static char *loader_platform_open_library_error(const char *libPath) {
static char errorMsg[164];
(void)snprintf(errorMsg, 163, "Failed to open dynamic library \"%s\" with error %d", libPath, GetLastError());
(void)snprintf(errorMsg, 163, "Failed to open dynamic library \"%s\" with error %lu", libPath, GetLastError());
return errorMsg;
}
static void loader_platform_close_library(loader_platform_dl_handle library) { FreeLibrary(library); }
@ -272,19 +299,29 @@ static char *loader_platform_get_proc_address_error(const char *name) {
// Threads:
typedef HANDLE loader_platform_thread;
#define THREAD_LOCAL_DECL __declspec(thread)
// The once init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the
// resources allocated by anything allocated by once init. This isn't a problem for static libraries, but it is for dynamic
// ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources.
#if defined(LOADER_DYNAMIC_LIB)
#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
#define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
#else
#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT;
#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var;
#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func)
static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) {
void (*func)(void) = (void (*)(void))Parameter;
func();
return TRUE;
}
static void loader_platform_thread_once(void *ctl, void (*func)(void)) {
static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) {
assert(func != NULL);
assert(ctl != NULL);
InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, func, NULL);
}
#endif
// Thread IDs:
typedef DWORD loader_platform_thread_id;

View File

@ -47,7 +47,7 @@
typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
// flie directly, it won't be found.
// file directly, it won't be found.
#ifndef PFN_GetPhysicalDeviceProcAddr
typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
#endif

View File

@ -48,6 +48,8 @@
#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
#define VK_CURRENT_CHAIN_VERSION 1
// Version negotiation values
typedef enum VkNegotiateLayerStructType {
LAYER_NEGOTIATE_UNINTIALIZED = 0,
@ -138,6 +140,43 @@ extern "C" {
VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
typedef enum VkChainType {
VK_CHAIN_TYPE_UNKNOWN = 0,
VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1,
VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2,
} VkChainType;
typedef struct VkChainHeader {
VkChainType type;
uint32_t version;
uint32_t size;
} VkChainHeader;
typedef struct VkEnumerateInstanceExtensionPropertiesChain {
VkChainHeader header;
VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *,
VkExtensionProperties *);
const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink;
#if defined(__cplusplus)
inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const {
return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties);
}
#endif
} VkEnumerateInstanceExtensionPropertiesChain;
typedef struct VkEnumerateInstanceLayerPropertiesChain {
VkChainHeader header;
VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *);
const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink;
#if defined(__cplusplus)
inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const {
return pfnNextLayer(pNextLink, pPropertyCount, pProperties);
}
#endif
} VkEnumerateInstanceLayerPropertiesChain;
#ifdef __cplusplus
}
#endif

View File

@ -177,6 +177,9 @@ typedef struct VkLayerInstanceDispatchTable_ {
#ifdef VK_USE_PLATFORM_MACOS_MVK
PFN_vkCreateMacOSSurfaceMVK CreateMacOSSurfaceMVK;
#endif // VK_USE_PLATFORM_MACOS_MVK
// ---- VK_EXT_sample_locations extension commands
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT GetPhysicalDeviceMultisamplePropertiesEXT;
} VkLayerInstanceDispatchTable;
// Device function pointer dispatch table
@ -371,6 +374,14 @@ typedef struct VkLayerDispatchTable_ {
PFN_vkGetBufferMemoryRequirements2KHR GetBufferMemoryRequirements2KHR;
PFN_vkGetImageSparseMemoryRequirements2KHR GetImageSparseMemoryRequirements2KHR;
// ---- VK_KHR_sampler_ycbcr_conversion extension commands
PFN_vkCreateSamplerYcbcrConversionKHR CreateSamplerYcbcrConversionKHR;
PFN_vkDestroySamplerYcbcrConversionKHR DestroySamplerYcbcrConversionKHR;
// ---- VK_KHR_bind_memory2 extension commands
PFN_vkBindBufferMemory2KHR BindBufferMemory2KHR;
PFN_vkBindImageMemory2KHR BindImageMemory2KHR;
// ---- VK_EXT_debug_marker extension commands
PFN_vkDebugMarkerSetObjectTagEXT DebugMarkerSetObjectTagEXT;
PFN_vkDebugMarkerSetObjectNameEXT DebugMarkerSetObjectNameEXT;
@ -382,6 +393,9 @@ typedef struct VkLayerDispatchTable_ {
PFN_vkCmdDrawIndirectCountAMD CmdDrawIndirectCountAMD;
PFN_vkCmdDrawIndexedIndirectCountAMD CmdDrawIndexedIndirectCountAMD;
// ---- VK_AMD_shader_info extension commands
PFN_vkGetShaderInfoAMD GetShaderInfoAMD;
// ---- VK_NV_external_memory_win32 extension commands
#ifdef VK_USE_PLATFORM_WIN32_KHR
PFN_vkGetMemoryWin32HandleNV GetMemoryWin32HandleNV;
@ -389,13 +403,11 @@ typedef struct VkLayerDispatchTable_ {
// ---- VK_KHX_device_group extension commands
PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX GetDeviceGroupPeerMemoryFeaturesKHX;
PFN_vkBindBufferMemory2KHX BindBufferMemory2KHX;
PFN_vkBindImageMemory2KHX BindImageMemory2KHX;
PFN_vkCmdSetDeviceMaskKHX CmdSetDeviceMaskKHX;
PFN_vkCmdDispatchBaseKHX CmdDispatchBaseKHX;
PFN_vkGetDeviceGroupPresentCapabilitiesKHX GetDeviceGroupPresentCapabilitiesKHX;
PFN_vkGetDeviceGroupSurfacePresentModesKHX GetDeviceGroupSurfacePresentModesKHX;
PFN_vkAcquireNextImage2KHX AcquireNextImage2KHX;
PFN_vkCmdDispatchBaseKHX CmdDispatchBaseKHX;
// ---- VK_NVX_device_generated_commands extension commands
PFN_vkCmdProcessCommandsNVX CmdProcessCommandsNVX;
@ -425,6 +437,18 @@ typedef struct VkLayerDispatchTable_ {
// ---- VK_EXT_hdr_metadata extension commands
PFN_vkSetHdrMetadataEXT SetHdrMetadataEXT;
// ---- VK_EXT_sample_locations extension commands
PFN_vkCmdSetSampleLocationsEXT CmdSetSampleLocationsEXT;
// ---- VK_EXT_validation_cache extension commands
PFN_vkCreateValidationCacheEXT CreateValidationCacheEXT;
PFN_vkDestroyValidationCacheEXT DestroyValidationCacheEXT;
PFN_vkMergeValidationCachesEXT MergeValidationCachesEXT;
PFN_vkGetValidationCacheDataEXT GetValidationCacheDataEXT;
// ---- VK_EXT_external_memory_host extension commands
PFN_vkGetMemoryHostPointerPropertiesEXT GetMemoryHostPointerPropertiesEXT;
} VkLayerDispatchTable;

View File

@ -43,4 +43,27 @@
#endif // _WIN32
#endif // VK_SDK_PLATFORM_H
// Check for noexcept support using clang, with fallback to Windows or GCC version numbers
#ifndef NOEXCEPT
#if defined(__clang__)
#if __has_feature(cxx_noexcept)
#define HAS_NOEXCEPT
#endif
#else
#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46
#define HAS_NOEXCEPT
#else
#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS
#define HAS_NOEXCEPT
#endif
#endif
#endif
#ifdef HAS_NOEXCEPT
#define NOEXCEPT noexcept
#else
#define NOEXCEPT
#endif
#endif
#endif // VK_SDK_PLATFORM_H

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff