[Vulkan] Immediate drawer textures
commit 886129cefa
parent 53adafa156
@@ -43,8 +43,11 @@ constexpr T align(T value, T alignment) {
 // Rounds the given number up to the next highest multiple.
 template <typename T, typename V>
-constexpr T round_up(T value, V multiple) {
-  return value ? (((value + multiple - 1) / multiple) * multiple) : multiple;
+constexpr T round_up(T value, V multiple, bool force_non_zero = true) {
+  if (force_non_zero && !value) {
+    return multiple;
+  }
+  return (value + multiple - 1) / multiple * multiple;
 }
 
 constexpr float saturate(float value) {
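Not part of the commit — a quick illustration of the new force_non_zero parameter, using the definition from the hunk above: zero now rounds up to one whole multiple by default (keeping sizes non-zero), and passing false restores plain round-up behavior.

#include <cstdint>

// Copied from the hunk above (round_up in xenia/base/math.h) so the asserts
// below are self-contained.
template <typename T, typename V>
constexpr T round_up(T value, V multiple, bool force_non_zero = true) {
  if (force_non_zero && !value) {
    return multiple;
  }
  return (value + multiple - 1) / multiple * multiple;
}

static_assert(round_up(uint32_t(5), 4) == 8, "rounds up to the next multiple");
static_assert(round_up(uint32_t(8), 4) == 8, "aligned values stay unchanged");
static_assert(round_up(uint32_t(0), 4) == 4, "zero is forced to one multiple");
static_assert(round_up(uint32_t(0), 4, false) == 0, "unless force_non_zero is false");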
@@ -104,7 +104,7 @@ bool DebugWindow::Initialize() {
 
   // Create the graphics context used for drawing.
   auto provider = emulator_->display_window()->context()->provider();
-  window_->set_context(provider->CreateContext(window_.get()));
+  window_->set_context(provider->CreateHostContext(window_.get()));
 
   // Enable imgui input.
   window_->set_imgui_input_enabled(true);
@@ -61,15 +61,16 @@ X_STATUS GraphicsSystem::Setup(cpu::Processor* processor,
     target_window_->loop()->PostSynchronous([&]() {
       // Create the context used for presentation.
       assert_null(target_window->context());
-      target_window_->set_context(provider_->CreateContext(target_window_));
+      target_window_->set_context(
+          provider_->CreateHostContext(target_window_));
 
       // Setup the context the command processor will do all its drawing in.
       // It's shared with the display context so that we can resolve
       // framebuffers from it.
-      processor_context = provider()->CreateOffscreenContext();
+      processor_context = provider()->CreateEmulationContext();
     });
   } else {
-    processor_context = provider()->CreateOffscreenContext();
+    processor_context = provider()->CreateEmulationContext();
   }
 
   if (!processor_context) {
@@ -127,7 +127,7 @@ int hid_demo_main(const std::vector<std::string>& args) {
   // The window will finish initialization wtih the context (loading
   // resources, etc).
   graphics_provider = CreateDemoGraphicsProvider(window.get());
-  window->set_context(graphics_provider->CreateContext(window.get()));
+  window->set_context(graphics_provider->CreateHostContext(window.get()));
 
   // Initialize input system and all drivers.
   input_system_ = std::make_unique<xe::hid::InputSystem>(window.get());
@@ -439,7 +439,7 @@ bool D3D12Provider::Initialize() {
   return true;
 }
 
-std::unique_ptr<GraphicsContext> D3D12Provider::CreateContext(
+std::unique_ptr<GraphicsContext> D3D12Provider::CreateHostContext(
     Window* target_window) {
   auto new_context =
       std::unique_ptr<D3D12Context>(new D3D12Context(this, target_window));
@@ -449,7 +449,7 @@ std::unique_ptr<GraphicsContext> D3D12Provider::CreateContext(
   return std::unique_ptr<GraphicsContext>(new_context.release());
 }
 
-std::unique_ptr<GraphicsContext> D3D12Provider::CreateOffscreenContext() {
+std::unique_ptr<GraphicsContext> D3D12Provider::CreateEmulationContext() {
   auto new_context =
       std::unique_ptr<D3D12Context>(new D3D12Context(this, nullptr));
   if (!new_context->Initialize()) {
@@ -27,9 +27,9 @@ class D3D12Provider : public GraphicsProvider {
 
   static std::unique_ptr<D3D12Provider> Create(Window* main_window);
 
-  std::unique_ptr<GraphicsContext> CreateContext(
+  std::unique_ptr<GraphicsContext> CreateHostContext(
       Window* target_window) override;
-  std::unique_ptr<GraphicsContext> CreateOffscreenContext() override;
+  std::unique_ptr<GraphicsContext> CreateEmulationContext() override;
 
   IDXGIFactory2* GetDXGIFactory() const { return dxgi_factory_; }
   // nullptr if PIX not attached.
@@ -28,13 +28,13 @@ class GraphicsProvider {
   // The 'main' window of an application, used to query provider information.
   Window* main_window() const { return main_window_; }
 
-  // Creates a new graphics context and swapchain for presenting to a window.
-  virtual std::unique_ptr<GraphicsContext> CreateContext(
+  // Creates a new host-side graphics context and swapchain, possibly presenting
+  // to a window and using the immediate drawer.
+  virtual std::unique_ptr<GraphicsContext> CreateHostContext(
       Window* target_window) = 0;
 
-  // Creates a new offscreen graphics context without a swapchain or immediate
-  // drawer.
-  virtual std::unique_ptr<GraphicsContext> CreateOffscreenContext() = 0;
+  // Creates a new offscreen emulation graphics context.
+  virtual std::unique_ptr<GraphicsContext> CreateEmulationContext() = 0;
 
  protected:
   explicit GraphicsProvider(Window* main_window) : main_window_(main_window) {}
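Not part of the commit — a hypothetical helper sketching which of the two renamed factory methods a caller is expected to use (MakeContextFor and the include paths are assumptions; the methods are the ones declared above, and the split mirrors the GraphicsSystem::Setup change earlier in this commit).

#include <memory>

#include "xenia/ui/graphics_provider.h"
#include "xenia/ui/window.h"

std::unique_ptr<xe::ui::GraphicsContext> MakeContextFor(
    xe::ui::GraphicsProvider* provider, xe::ui::Window* window_or_null) {
  if (window_or_null) {
    // Swapchain plus immediate drawer, presenting to the window (UI paths).
    return provider->CreateHostContext(window_or_null);
  }
  // No swapchain or immediate drawer; used for offscreen GPU emulation work.
  return provider->CreateEmulationContext();
}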
@@ -2,13 +2,13 @@
 // source: immediate.frag
 const uint8_t immediate_frag[] = {
     0x03, 0x02, 0x23, 0x07, 0x00, 0x00, 0x01, 0x00, 0x0A, 0x00, 0x08, 0x00,
-    0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00,
+    0x19, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x00, 0x02, 0x00,
     0x01, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x06, 0x00, 0x01, 0x00, 0x00, 0x00,
     0x47, 0x4C, 0x53, 0x4C, 0x2E, 0x73, 0x74, 0x64, 0x2E, 0x34, 0x35, 0x30,
     0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
     0x01, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x08, 0x00, 0x04, 0x00, 0x00, 0x00,
     0x04, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x69, 0x6E, 0x00, 0x00, 0x00, 0x00,
-    0x09, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00,
+    0x09, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
     0x10, 0x00, 0x03, 0x00, 0x04, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
     0x03, 0x00, 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x01, 0x00, 0x00,
     0x05, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x69, 0x6E,
@@ -16,32 +16,56 @@ const uint8_t immediate_frag[] = {
     0x78, 0x65, 0x5F, 0x66, 0x72, 0x61, 0x67, 0x5F, 0x63, 0x6F, 0x6C, 0x6F,
     0x72, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x0B, 0x00, 0x00, 0x00,
     0x78, 0x65, 0x5F, 0x76, 0x61, 0x72, 0x5F, 0x63, 0x6F, 0x6C, 0x6F, 0x72,
-    0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x0F, 0x00, 0x00, 0x00,
-    0x78, 0x65, 0x5F, 0x76, 0x61, 0x72, 0x5F, 0x74, 0x65, 0x78, 0x63, 0x6F,
-    0x6F, 0x72, 0x64, 0x00, 0x47, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00,
-    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x09, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x08, 0x00, 0x10, 0x00, 0x00, 0x00,
+    0x78, 0x65, 0x5F, 0x69, 0x6D, 0x6D, 0x65, 0x64, 0x69, 0x61, 0x74, 0x65,
+    0x5F, 0x74, 0x65, 0x78, 0x74, 0x75, 0x72, 0x65, 0x00, 0x00, 0x00, 0x00,
+    0x05, 0x00, 0x06, 0x00, 0x14, 0x00, 0x00, 0x00, 0x78, 0x65, 0x5F, 0x76,
+    0x61, 0x72, 0x5F, 0x74, 0x65, 0x78, 0x63, 0x6F, 0x6F, 0x72, 0x64, 0x00,
+    0x47, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x47, 0x00, 0x04, 0x00, 0x09, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x0B, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x0B, 0x00, 0x00, 0x00,
+    0x1E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+    0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+    0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
+    0x10, 0x00, 0x00, 0x00, 0x22, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x47, 0x00, 0x04, 0x00, 0x10, 0x00, 0x00, 0x00, 0x21, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00, 0x11, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00, 0x14, 0x00, 0x00, 0x00,
     0x1E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
-    0x0B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x04, 0x00,
-    0x0B, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
-    0x47, 0x00, 0x03, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-    0x47, 0x00, 0x04, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x1E, 0x00, 0x00, 0x00,
-    0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00, 0x02, 0x00, 0x00, 0x00,
-    0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
-    0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00,
-    0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
-    0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00,
-    0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00,
-    0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
-    0x20, 0x00, 0x04, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
-    0x07, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x0A, 0x00, 0x00, 0x00,
-    0x0B, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
-    0x0D, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
-    0x20, 0x00, 0x04, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
-    0x0D, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x0E, 0x00, 0x00, 0x00,
-    0x0F, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x36, 0x00, 0x05, 0x00,
-    0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-    0x03, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00, 0x05, 0x00, 0x00, 0x00,
-    0x3D, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
-    0x0B, 0x00, 0x00, 0x00, 0x3E, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00,
-    0x0C, 0x00, 0x00, 0x00, 0xFD, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
+    0x17, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x47, 0x00, 0x03, 0x00,
+    0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x13, 0x00, 0x02, 0x00,
+    0x02, 0x00, 0x00, 0x00, 0x21, 0x00, 0x03, 0x00, 0x03, 0x00, 0x00, 0x00,
+    0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x03, 0x00, 0x06, 0x00, 0x00, 0x00,
+    0x20, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
+    0x06, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00,
+    0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00,
+    0x3B, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00,
+    0x03, 0x00, 0x00, 0x00, 0x20, 0x00, 0x04, 0x00, 0x0A, 0x00, 0x00, 0x00,
+    0x01, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00,
+    0x0A, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+    0x19, 0x00, 0x09, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00,
+    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x1B, 0x00, 0x03, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x0D, 0x00, 0x00, 0x00,
+    0x20, 0x00, 0x04, 0x00, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x0E, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x0F, 0x00, 0x00, 0x00,
+    0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x00, 0x04, 0x00,
+    0x12, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
+    0x20, 0x00, 0x04, 0x00, 0x13, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
+    0x12, 0x00, 0x00, 0x00, 0x3B, 0x00, 0x04, 0x00, 0x13, 0x00, 0x00, 0x00,
+    0x14, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x2B, 0x00, 0x04, 0x00,
+    0x06, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    0x36, 0x00, 0x05, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
+    0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF8, 0x00, 0x02, 0x00,
+    0x05, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00, 0x07, 0x00, 0x00, 0x00,
+    0x0C, 0x00, 0x00, 0x00, 0x0B, 0x00, 0x00, 0x00, 0x3D, 0x00, 0x04, 0x00,
+    0x0E, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
+    0x3D, 0x00, 0x04, 0x00, 0x12, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00,
+    0x14, 0x00, 0x00, 0x00, 0x58, 0x00, 0x07, 0x00, 0x07, 0x00, 0x00, 0x00,
+    0x17, 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x15, 0x00, 0x00, 0x00,
+    0x02, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, 0x85, 0x00, 0x05, 0x00,
+    0x07, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
+    0x17, 0x00, 0x00, 0x00, 0x3E, 0x00, 0x03, 0x00, 0x09, 0x00, 0x00, 0x00,
+    0x18, 0x00, 0x00, 0x00, 0xFD, 0x00, 0x01, 0x00, 0x38, 0x00, 0x01, 0x00,
 };
Binary file not shown.
@@ -1,7 +1,7 @@
 ; SPIR-V
 ; Version: 1.0
 ; Generator: Khronos Glslang Reference Front End; 10
-; Bound: 16
+; Bound: 25
 ; Schema: 0
 OpCapability Shader
 %1 = OpExtInstImport "GLSL.std.450"
@@ -12,13 +12,20 @@
 OpName %main "main"
 OpName %xe_frag_color "xe_frag_color"
 OpName %xe_var_color "xe_var_color"
+OpName %xe_immediate_texture "xe_immediate_texture"
 OpName %xe_var_texcoord "xe_var_texcoord"
 OpDecorate %xe_frag_color RelaxedPrecision
 OpDecorate %xe_frag_color Location 0
 OpDecorate %xe_var_color RelaxedPrecision
 OpDecorate %xe_var_color Location 1
 OpDecorate %12 RelaxedPrecision
+OpDecorate %xe_immediate_texture RelaxedPrecision
+OpDecorate %xe_immediate_texture DescriptorSet 0
+OpDecorate %xe_immediate_texture Binding 0
+OpDecorate %17 RelaxedPrecision
 OpDecorate %xe_var_texcoord Location 0
+OpDecorate %23 RelaxedPrecision
+OpDecorate %24 RelaxedPrecision
 %void = OpTypeVoid
 %3 = OpTypeFunction %void
 %float = OpTypeFloat 32
@@ -27,12 +34,21 @@
 %xe_frag_color = OpVariable %_ptr_Output_v4float Output
 %_ptr_Input_v4float = OpTypePointer Input %v4float
 %xe_var_color = OpVariable %_ptr_Input_v4float Input
+%13 = OpTypeImage %float 2D 0 0 0 1 Unknown
+%14 = OpTypeSampledImage %13
+%_ptr_UniformConstant_14 = OpTypePointer UniformConstant %14
+%xe_immediate_texture = OpVariable %_ptr_UniformConstant_14 UniformConstant
 %v2float = OpTypeVector %float 2
 %_ptr_Input_v2float = OpTypePointer Input %v2float
 %xe_var_texcoord = OpVariable %_ptr_Input_v2float Input
+%float_0 = OpConstant %float 0
 %main = OpFunction %void None %3
 %5 = OpLabel
 %12 = OpLoad %v4float %xe_var_color
-OpStore %xe_frag_color %12
+%17 = OpLoad %14 %xe_immediate_texture
+%21 = OpLoad %v2float %xe_var_texcoord
+%23 = OpImageSampleExplicitLod %v4float %17 %21 Lod %float_0
+%24 = OpFMul %v4float %12 %23
+OpStore %xe_frag_color %24
 OpReturn
 OpFunctionEnd
@@ -1,11 +1,14 @@
 #version 310 es
 precision highp float;
 
+layout(set = 0, binding = 0) uniform lowp sampler2D xe_immediate_texture;
+
 layout(location = 0) in vec2 xe_var_texcoord;
 layout(location = 1) in lowp vec4 xe_var_color;
 
 layout(location = 0) out lowp vec4 xe_frag_color;
 
 void main() {
-  xe_frag_color = xe_var_color;
+  xe_frag_color =
+      xe_var_color * textureLod(xe_immediate_texture, xe_var_texcoord, 0.0);
 }
@@ -11,6 +11,7 @@
 
 #include <algorithm>
 #include <cstdint>
+#include <utility>
 #include <vector>
 
 #include "xenia/base/assert.h"
@@ -151,14 +152,21 @@ void VulkanContext::Shutdown() {
   util::DestroyAndNullHandle(dfn.vkDestroySemaphore, device,
                              swap_image_acquisition_semaphore_);
 
+  swap_submission_completed_ = 0;
+  swap_submission_current_ = 1;
   for (uint32_t i = 0; i < kSwapchainMaxImageCount; ++i) {
     SwapSubmission& submission = swap_submissions_[i];
+    submission.setup_command_buffer_index = UINT32_MAX;
     util::DestroyAndNullHandle(dfn.vkDestroyCommandPool, device,
                                submission.command_pool);
     util::DestroyAndNullHandle(dfn.vkDestroyFence, device, submission.fence);
+    if (i < swap_setup_command_buffers_allocated_count_) {
+      dfn.vkDestroyCommandPool(device, swap_setup_command_buffers_[i].first,
+                               nullptr);
+    }
   }
-  swap_submission_current_ = 1;
-  swap_submission_completed_ = 0;
+  swap_setup_command_buffers_free_bits_ = 0;
+  swap_setup_command_buffers_allocated_count_ = 0;
 }
 
 ImmediateDrawer* VulkanContext::immediate_drawer() {
@@ -645,27 +653,10 @@ bool VulkanContext::BeginSwap() {
   // Await the frame data to be available before doing anything else.
   if (swap_submission_completed_ + kSwapchainMaxImageCount <
      swap_submission_current_) {
-    uint64_t submission_awaited =
-        swap_submission_current_ - kSwapchainMaxImageCount;
-    VkFence submission_fences[kSwapchainMaxImageCount];
-    uint32_t submission_fence_count = 0;
-    while (swap_submission_completed_ + 1 + submission_fence_count <=
-           submission_awaited) {
-      assert_true(submission_fence_count < kSwapchainMaxImageCount);
-      uint32_t submission_index =
-          (swap_submission_completed_ + 1 + submission_fence_count) %
-          kSwapchainMaxImageCount;
-      submission_fences[submission_fence_count++] =
-          swap_submissions_[submission_index].fence;
-    }
-    if (submission_fence_count) {
-      if (dfn.vkWaitForFences(device, submission_fence_count,
-                              submission_fences, VK_TRUE,
-                              UINT64_MAX) != VK_SUCCESS) {
-        XELOGE("Failed to await the Vulkan presentation submission fences");
-        return false;
-      }
-      swap_submission_completed_ += submission_fence_count;
-    }
+    if (!AwaitSwapSubmissionsCompletion(
+            swap_submission_current_ - kSwapchainMaxImageCount, false)) {
+      XELOGE("Failed to await the Vulkan presentation submission fences");
+      return false;
+    }
   }
 
@@ -753,8 +744,20 @@ void VulkanContext::EndSwap() {
 
   const SwapSubmission& submission =
       swap_submissions_[swap_submission_current_ % kSwapchainMaxImageCount];
+  VkCommandBuffer submit_command_buffers[2];
+  uint32_t submit_command_buffer_count = 0;
+  if (submission.setup_command_buffer_index != UINT32_MAX) {
+    VkCommandBuffer submit_setup_command_buffer =
+        swap_setup_command_buffers_[submission.setup_command_buffer_index]
+            .second;
+    dfn.vkEndCommandBuffer(submit_setup_command_buffer);
+    submit_command_buffers[submit_command_buffer_count++] =
+        submit_setup_command_buffer;
+  }
   dfn.vkCmdEndRenderPass(submission.command_buffer);
   dfn.vkEndCommandBuffer(submission.command_buffer);
+  submit_command_buffers[submit_command_buffer_count++] =
+      submission.command_buffer;
   dfn.vkResetFences(device, 1, &submission.fence);
   VkSubmitInfo submit_info;
   submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
@@ -764,8 +767,8 @@ void VulkanContext::EndSwap() {
   VkPipelineStageFlags image_acquisition_semaphore_wait_stage =
       VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
   submit_info.pWaitDstStageMask = &image_acquisition_semaphore_wait_stage;
-  submit_info.commandBufferCount = 1;
-  submit_info.pCommandBuffers = &submission.command_buffer;
+  submit_info.commandBufferCount = submit_command_buffer_count;
+  submit_info.pCommandBuffers = submit_command_buffers;
   submit_info.signalSemaphoreCount = 1;
  submit_info.pSignalSemaphores = &swap_render_completion_semaphore_;
   VkResult submit_result = dfn.vkQueueSubmit(queue_graphics_compute, 1,
@@ -845,22 +848,124 @@ void VulkanContext::RequestSurfaceRecreation() {
   swap_surface_ = VK_NULL_HANDLE;
 }
 
-void VulkanContext::AwaitAllSwapSubmissionsCompletion() {
+bool VulkanContext::AwaitSwapSubmissionsCompletion(uint64_t awaited_submission,
+                                                   bool ignore_result) {
   assert_not_null(target_window_);
+  assert_true(awaited_submission < swap_submission_current_);
   const VulkanProvider& provider = GetVulkanProvider();
   const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
   VkDevice device = provider.device();
   VkFence fences[kSwapchainMaxImageCount];
   uint32_t fence_count = 0;
-  while (swap_submission_completed_ + 1 < swap_submission_current_) {
+  while (swap_submission_completed_ + 1 + fence_count <= awaited_submission) {
     assert_true(fence_count < kSwapchainMaxImageCount);
-    uint32_t submission_index =
-        ++swap_submission_completed_ % kSwapchainMaxImageCount;
+    uint32_t submission_index = (swap_submission_completed_ + 1 + fence_count) %
+                                kSwapchainMaxImageCount;
     fences[fence_count++] = swap_submissions_[submission_index].fence;
   }
-  if (fence_count && !context_lost_) {
-    dfn.vkWaitForFences(device, fence_count, fences, VK_TRUE, UINT64_MAX);
+  if (!fence_count) {
+    return true;
   }
+  VkResult result =
+      dfn.vkWaitForFences(device, fence_count, fences, VK_TRUE, UINT64_MAX);
+  if (!ignore_result && result != VK_SUCCESS) {
+    return false;
+  }
+  // Reclaim setup command buffers if used.
+  for (uint32_t i = 0; i < fence_count; ++i) {
+    uint32_t submission_index =
+        (swap_submission_completed_ + 1 + i) % kSwapchainMaxImageCount;
+    uint32_t& setup_command_buffer_index =
+        swap_submissions_[submission_index].setup_command_buffer_index;
+    if (setup_command_buffer_index == UINT32_MAX) {
+      continue;
+    }
+    assert_zero(swap_setup_command_buffers_free_bits_ &
+                (uint32_t(1) << setup_command_buffer_index));
+    swap_setup_command_buffers_free_bits_ |= uint32_t(1)
+                                             << setup_command_buffer_index;
+    setup_command_buffer_index = UINT32_MAX;
+  }
+  swap_submission_completed_ += fence_count;
+  return result == VK_SUCCESS;
+}
+
+VkCommandBuffer VulkanContext::AcquireSwapSetupCommandBuffer() {
+  assert_not_null(target_window_);
+
+  uint32_t& submission_command_buffer_index =
+      swap_submissions_[swap_submission_current_ % kSwapchainMaxImageCount]
+          .setup_command_buffer_index;
+  if (submission_command_buffer_index != UINT32_MAX) {
+    // A command buffer is already being recorded.
+    return swap_setup_command_buffers_[submission_command_buffer_index].second;
+  }
+
+  const VulkanProvider& provider = GetVulkanProvider();
+  const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
+  VkDevice device = provider.device();
+
+  VkCommandBufferBeginInfo command_buffer_begin_info;
+  command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+  command_buffer_begin_info.pNext = nullptr;
+  command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+  command_buffer_begin_info.pInheritanceInfo = nullptr;
+
+  // Try to use a recycled one.
+  uint32_t command_buffer_index;
+  if (xe::bit_scan_forward(swap_setup_command_buffers_free_bits_,
+                           &command_buffer_index)) {
+    const std::pair<VkCommandPool, VkCommandBuffer>& command_buffer =
+        swap_setup_command_buffers_[command_buffer_index];
+    if (dfn.vkResetCommandPool(device, command_buffer.first, 0) != VK_SUCCESS ||
+        dfn.vkBeginCommandBuffer(command_buffer.second,
+                                 &command_buffer_begin_info) != VK_SUCCESS) {
+      return VK_NULL_HANDLE;
+    }
+    submission_command_buffer_index = command_buffer_index;
+    swap_setup_command_buffers_free_bits_ &=
+        ~(uint32_t(1) << command_buffer_index);
+    return command_buffer.second;
+  }
+
+  // Create a new command buffer.
+  assert_true(swap_setup_command_buffers_allocated_count_ <
+              kSwapchainMaxImageCount);
+  if (swap_setup_command_buffers_allocated_count_ >= kSwapchainMaxImageCount) {
+    return VK_NULL_HANDLE;
+  }
+  VkCommandPoolCreateInfo command_pool_create_info;
+  command_pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+  command_pool_create_info.pNext = nullptr;
+  command_pool_create_info.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
+  command_pool_create_info.queueFamilyIndex =
+      provider.queue_family_graphics_compute();
+  VkCommandPool new_command_pool;
+  if (dfn.vkCreateCommandPool(device, &command_pool_create_info, nullptr,
+                              &new_command_pool) != VK_SUCCESS) {
+    return VK_NULL_HANDLE;
+  }
+  VkCommandBufferAllocateInfo command_buffer_allocate_info;
+  command_buffer_allocate_info.sType =
+      VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
+  command_buffer_allocate_info.pNext = nullptr;
+  command_buffer_allocate_info.commandPool = new_command_pool;
+  command_buffer_allocate_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
+  command_buffer_allocate_info.commandBufferCount = 1;
+  VkCommandBuffer new_command_buffer;
+  if (dfn.vkAllocateCommandBuffers(device, &command_buffer_allocate_info,
+                                   &new_command_buffer) != VK_SUCCESS ||
+      dfn.vkBeginCommandBuffer(new_command_buffer,
+                               &command_buffer_begin_info) != VK_SUCCESS) {
+    dfn.vkDestroyCommandPool(device, new_command_pool, nullptr);
+    return VK_NULL_HANDLE;
+  }
+  uint32_t new_command_buffer_index =
+      swap_setup_command_buffers_allocated_count_++;
+  submission_command_buffer_index = new_command_buffer_index;
+  swap_setup_command_buffers_[new_command_buffer_index] =
+      std::make_pair(new_command_pool, new_command_buffer);
+  return new_command_buffer;
 }
 
 void VulkanContext::DestroySwapchainFramebuffers() {
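Not part of the commit — a small worked example of the frame-in-flight window that BeginSwap enforces through AwaitSwapSubmissionsCompletion (the value 3 merely stands in for kSwapchainMaxImageCount).

#include <cassert>
#include <cstdint>

int main() {
  constexpr uint64_t kMaxImages = 3;  // stand-in for kSwapchainMaxImageCount
  uint64_t completed = 3;  // latest submission whose fence is known signaled
  uint64_t current = 7;    // submission about to reuse slot 7 % 3 == 1
  // Slot 1 was last used by submission 4, so everything up to 7 - 3 == 4 must
  // have completed before recording into it can start again.
  if (completed + kMaxImages < current) {
    uint64_t awaited = current - kMaxImages;
    assert(awaited == 4);
    completed = awaited;  // what a successful wait advances completed to
  }
  assert(completed + kMaxImages >= current);
  return 0;
}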
@@ -12,6 +12,7 @@
 
 #include <cstdint>
 #include <memory>
+#include <utility>
 #include <vector>
 
 #include "xenia/ui/graphics_context.h"
@@ -45,6 +46,7 @@ class VulkanContext : public GraphicsContext {
    return swap_submissions_[swap_submission_current_ % kSwapchainMaxImageCount]
        .command_buffer;
   }
+  VkCommandBuffer AcquireSwapSetupCommandBuffer();
   uint64_t swap_submission_current() const { return swap_submission_current_; }
   uint64_t swap_submission_completed() const {
     return swap_submission_completed_;
@@ -63,7 +65,12 @@ class VulkanContext : public GraphicsContext {
  private:
   void Shutdown();
 
-  void AwaitAllSwapSubmissionsCompletion();
+  bool AwaitSwapSubmissionsCompletion(uint64_t awaited_submission,
+                                      bool ignore_result);
+  void AwaitAllSwapSubmissionsCompletion() {
+    // Current starts from 1, so subtracting 1 can't result in a negative value.
+    AwaitSwapSubmissionsCompletion(swap_submission_current_ - 1, true);
+  }
 
   // AwaitAllSwapSubmissionsCompletion must be called before. As this can be
   // used in swapchain creation or in shutdown,
@@ -83,6 +90,13 @@ class VulkanContext : public GraphicsContext {
   // (it's okay to wait first for completion of A, then of B, no matter if they
   // are actually completed in AB or in BA order).
 
+  // May be used infrequently, so allocated on demand (to only keep 1 rather
+  // than 3).
+  std::pair<VkCommandPool, VkCommandBuffer>
+      swap_setup_command_buffers_[kSwapchainMaxImageCount];
+  uint32_t swap_setup_command_buffers_allocated_count_ = 0;
+  uint32_t swap_setup_command_buffers_free_bits_ = 0;
+
   struct SwapSubmission {
     // One pool per frame, with resetting the pool itself rather than individual
     // command buffers (resetting command buffers themselves is not recommended
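Not part of the commit — a standalone sketch of the free-bits bookkeeping behind the swap_setup_command_buffers_free_bits_ field added above (the tracker type is hypothetical, and __builtin_ctz stands in for xe::bit_scan_forward).

#include <cstdint>

struct SetupCommandBufferTracker {
  uint32_t allocated_count = 0;  // buffers created so far, grown on demand
  uint32_t free_bits = 0;        // bit i set => allocated buffer i is idle

  // Returns a recycled index, or -1 if a new buffer has to be created.
  int AcquireRecycled() {
    if (!free_bits) {
      return -1;
    }
    int index = __builtin_ctz(free_bits);  // lowest set bit, like bit_scan_forward
    free_bits &= ~(uint32_t(1) << index);  // owned by the current frame now
    return index;
  }

  // Called once the submission that recorded into the buffer has completed.
  void Release(int index) { free_bits |= uint32_t(1) << index; }
};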
@@ -92,6 +106,7 @@ class VulkanContext : public GraphicsContext {
     VkFence fence = VK_NULL_HANDLE;
     VkCommandPool command_pool = VK_NULL_HANDLE;
     VkCommandBuffer command_buffer;
+    uint32_t setup_command_buffer_index = UINT32_MAX;
   };
   SwapSubmission swap_submissions_[kSwapchainMaxImageCount];
   uint64_t swap_submission_current_ = 1;
@@ -26,12 +26,6 @@ namespace vulkan {
 #include "xenia/ui/shaders/bytecode/vulkan_spirv/immediate_frag.h"
 #include "xenia/ui/shaders/bytecode/vulkan_spirv/immediate_vert.h"
 
-class VulkanImmediateTexture : public ImmediateTexture {
- public:
-  VulkanImmediateTexture(uint32_t width, uint32_t height)
-      : ImmediateTexture(width, height) {}
-};
-
 VulkanImmediateDrawer::VulkanImmediateDrawer(VulkanContext& graphics_context)
     : ImmediateDrawer(&graphics_context), context_(graphics_context) {}
 
@@ -42,6 +36,42 @@ bool VulkanImmediateDrawer::Initialize() {
   const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
   VkDevice device = provider.device();
 
+  VkDescriptorSetLayoutBinding texture_descriptor_set_layout_binding;
+  texture_descriptor_set_layout_binding.binding = 0;
+  texture_descriptor_set_layout_binding.descriptorType =
+      VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+  texture_descriptor_set_layout_binding.descriptorCount = 1;
+  texture_descriptor_set_layout_binding.stageFlags =
+      VK_SHADER_STAGE_FRAGMENT_BIT;
+  texture_descriptor_set_layout_binding.pImmutableSamplers = nullptr;
+  VkDescriptorSetLayoutCreateInfo texture_descriptor_set_layout_create_info;
+  texture_descriptor_set_layout_create_info.sType =
+      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+  texture_descriptor_set_layout_create_info.pNext = nullptr;
+  texture_descriptor_set_layout_create_info.flags = 0;
+  texture_descriptor_set_layout_create_info.bindingCount = 1;
+  texture_descriptor_set_layout_create_info.pBindings =
+      &texture_descriptor_set_layout_binding;
+  if (dfn.vkCreateDescriptorSetLayout(
+          device, &texture_descriptor_set_layout_create_info, nullptr,
+          &texture_descriptor_set_layout_) != VK_SUCCESS) {
+    XELOGE(
+        "Failed to create the immediate drawer Vulkan combined image sampler "
+        "descriptor set layout");
+    Shutdown();
+    return false;
+  }
+
+  // Create the (1, 1, 1, 1) texture as a replacement when drawing without a
+  // real texture.
+  white_texture_index_ = CreateVulkanTexture(
+      1, 1, ImmediateTextureFilter::kNearest, false, nullptr);
+  if (white_texture_index_ == SIZE_MAX) {
+    XELOGE("Failed to create a blank texture for the Vulkan immediate drawer");
+    Shutdown();
+    return false;
+  }
+
   VkPushConstantRange push_constant_ranges[1];
   push_constant_ranges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
   push_constant_ranges[0].offset = offsetof(PushConstants, vertex);
@@ -51,8 +81,8 @@ bool VulkanImmediateDrawer::Initialize() {
       VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
   pipeline_layout_create_info.pNext = nullptr;
   pipeline_layout_create_info.flags = 0;
-  pipeline_layout_create_info.setLayoutCount = 0;
-  pipeline_layout_create_info.pSetLayouts = nullptr;
+  pipeline_layout_create_info.setLayoutCount = 1;
+  pipeline_layout_create_info.pSetLayouts = &texture_descriptor_set_layout_;
   pipeline_layout_create_info.pushConstantRangeCount =
       uint32_t(xe::countof(push_constant_ranges));
   pipeline_layout_create_info.pPushConstantRanges = push_constant_ranges;
@@ -86,13 +116,71 @@ void VulkanImmediateDrawer::Shutdown() {
 
   util::DestroyAndNullHandle(dfn.vkDestroyPipelineLayout, device,
                              pipeline_layout_);
+
+  for (SubmittedTextureUpload& submitted_texture_upload :
+       texture_uploads_submitted_) {
+    if (submitted_texture_upload.buffer != VK_NULL_HANDLE) {
+      dfn.vkDestroyBuffer(device, submitted_texture_upload.buffer, nullptr);
+    }
+    if (submitted_texture_upload.buffer_memory != VK_NULL_HANDLE) {
+      dfn.vkFreeMemory(device, submitted_texture_upload.buffer_memory, nullptr);
+    }
+  }
+  texture_uploads_submitted_.clear();
+  for (PendingTextureUpload& pending_texture_upload :
+       texture_uploads_pending_) {
+    if (pending_texture_upload.buffer != VK_NULL_HANDLE) {
+      dfn.vkDestroyBuffer(device, pending_texture_upload.buffer, nullptr);
+    }
+    if (pending_texture_upload.buffer_memory != VK_NULL_HANDLE) {
+      dfn.vkFreeMemory(device, pending_texture_upload.buffer_memory, nullptr);
+    }
+  }
+  texture_uploads_pending_.clear();
+  textures_free_.clear();
+  for (Texture& texture : textures_) {
+    if (!texture.reference_count) {
+      continue;
+    }
+    if (texture.immediate_texture) {
+      texture.immediate_texture->DetachFromImmediateDrawer();
+    }
+    dfn.vkDestroyImageView(device, texture.image_view, nullptr);
+    dfn.vkDestroyImage(device, texture.image, nullptr);
+    dfn.vkFreeMemory(device, texture.memory, nullptr);
+  }
+  textures_.clear();
+
+  texture_descriptor_pool_recycled_first_ = nullptr;
+  texture_descriptor_pool_unallocated_first_ = nullptr;
+  for (TextureDescriptorPool* pool : texture_descriptor_pools_) {
+    dfn.vkDestroyDescriptorPool(device, pool->pool, nullptr);
+    delete pool;
+  }
+  texture_descriptor_pools_.clear();
+  util::DestroyAndNullHandle(dfn.vkDestroyDescriptorSetLayout, device,
+                             texture_descriptor_set_layout_);
 }
 
 std::unique_ptr<ImmediateTexture> VulkanImmediateDrawer::CreateTexture(
-    uint32_t width, uint32_t height, ImmediateTextureFilter filter, bool repeat,
-    const uint8_t* data) {
-  auto texture = std::make_unique<VulkanImmediateTexture>(width, height);
-  return std::unique_ptr<ImmediateTexture>(texture.release());
+    uint32_t width, uint32_t height, ImmediateTextureFilter filter,
+    bool is_repeated, const uint8_t* data) {
+  assert_not_null(data);
+  size_t texture_index =
+      CreateVulkanTexture(width, height, filter, is_repeated, data);
+  if (texture_index == SIZE_MAX) {
+    texture_index = white_texture_index_;
+  }
+  Texture& texture = textures_[texture_index];
+  auto immediate_texture = std::make_unique<VulkanImmediateTexture>(
+      width, height, this, GetTextureHandleForIndex(texture_index));
+  if (texture_index != white_texture_index_) {
+    texture.immediate_texture = immediate_texture.get();
+  }
+  // Transferring a new reference to a real texture or giving a weak reference
+  // to the white texture (there's no backlink to the ImmediateTexture from it
+  // also).
+  return std::unique_ptr<ImmediateTexture>(immediate_texture.release());
 }
 
 void VulkanImmediateDrawer::Begin(int render_target_width,
@@ -107,10 +195,32 @@ void VulkanImmediateDrawer::Begin(int render_target_width,
   current_command_buffer_ = context_.GetSwapCommandBuffer();
 
   uint64_t submission_completed = context_.swap_submission_completed();
-  vertex_buffer_pool_->Reclaim(submission_completed);
 
-  const VulkanProvider::DeviceFunctions& dfn =
-      context_.GetVulkanProvider().dfn();
+  const VulkanProvider& provider = context_.GetVulkanProvider();
+  const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
+  VkDevice device = provider.device();
+
+  // Release upload buffers for completed texture uploads.
+  auto erase_texture_uploads_end = texture_uploads_submitted_.begin();
+  while (erase_texture_uploads_end != texture_uploads_submitted_.end()) {
+    if (erase_texture_uploads_end->submission_index > submission_completed) {
+      break;
+    }
+    if (erase_texture_uploads_end->buffer != VK_NULL_HANDLE) {
+      dfn.vkDestroyBuffer(device, erase_texture_uploads_end->buffer, nullptr);
+    }
+    if (erase_texture_uploads_end->buffer_memory != VK_NULL_HANDLE) {
+      dfn.vkFreeMemory(device, erase_texture_uploads_end->buffer_memory,
+                       nullptr);
+    }
+    // Release the texture reference held for uploading.
+    ReleaseTexture(erase_texture_uploads_end->texture_index);
+    ++erase_texture_uploads_end;
+  }
+  texture_uploads_submitted_.erase(texture_uploads_submitted_.begin(),
+                                   erase_texture_uploads_end);
+
+  vertex_buffer_pool_->Reclaim(submission_completed);
 
   current_render_target_extent_.width = uint32_t(render_target_width);
   current_render_target_extent_.height = uint32_t(render_target_height);
@@ -135,6 +245,7 @@ void VulkanImmediateDrawer::Begin(int render_target_width,
   current_scissor_.extent.height = 0;
 
   current_pipeline_ = VK_NULL_HANDLE;
+  current_texture_descriptor_index_ = UINT32_MAX;
 }
 
 void VulkanImmediateDrawer::BeginDrawBatch(const ImmediateDrawBatch& batch) {
@@ -221,7 +332,7 @@ void VulkanImmediateDrawer::Draw(const ImmediateDraw& draw) {
     dfn.vkCmdSetScissor(current_command_buffer_, 0, 1, &scissor);
   }
 
-  // Bind the pipeline for the current primitive count.
+  // Bind the pipeline for the current primitive type.
   VkPipeline pipeline;
   switch (draw.primitive_type) {
     case ImmediatePrimitiveType::kLines:
@@ -240,6 +351,18 @@ void VulkanImmediateDrawer::Draw(const ImmediateDraw& draw) {
                           VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   }
 
+  // Bind the texture.
+  uint32_t texture_descriptor_index =
+      textures_[GetTextureIndexForHandle(draw.texture_handle)].descriptor_index;
+  if (current_texture_descriptor_index_ != texture_descriptor_index) {
+    current_texture_descriptor_index_ = texture_descriptor_index;
+    VkDescriptorSet texture_descriptor_set =
+        GetTextureDescriptor(texture_descriptor_index);
+    dfn.vkCmdBindDescriptorSets(
+        current_command_buffer_, VK_PIPELINE_BIND_POINT_GRAPHICS,
+        pipeline_layout_, 0, 1, &texture_descriptor_set, 0, nullptr);
+  }
+
   // Draw.
   if (batch_has_index_buffer_) {
     dfn.vkCmdDrawIndexed(current_command_buffer_, draw.count, 1,
|
||||||
// available.
|
// available.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Copy textures.
|
||||||
|
if (!texture_uploads_pending_.empty()) {
|
||||||
|
VkCommandBuffer setup_command_buffer =
|
||||||
|
context_.AcquireSwapSetupCommandBuffer();
|
||||||
|
if (setup_command_buffer != VK_NULL_HANDLE) {
|
||||||
|
const VulkanProvider::DeviceFunctions& dfn =
|
||||||
|
context_.GetVulkanProvider().dfn();
|
||||||
|
size_t texture_uploads_pending_count = texture_uploads_pending_.size();
|
||||||
|
uint64_t submission_current = context_.swap_submission_current();
|
||||||
|
|
||||||
|
// Transition to VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL.
|
||||||
|
std::vector<VkImageMemoryBarrier> image_memory_barriers;
|
||||||
|
image_memory_barriers.reserve(texture_uploads_pending_count);
|
||||||
|
VkImageMemoryBarrier image_memory_barrier;
|
||||||
|
image_memory_barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
|
||||||
|
image_memory_barrier.pNext = nullptr;
|
||||||
|
image_memory_barrier.srcAccessMask = 0;
|
||||||
|
image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
|
||||||
|
image_memory_barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
||||||
|
image_memory_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
|
||||||
|
image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||||
|
image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||||
|
util::InitializeSubresourceRange(image_memory_barrier.subresourceRange);
|
||||||
|
for (const PendingTextureUpload& pending_texture_upload :
|
||||||
|
texture_uploads_pending_) {
|
||||||
|
image_memory_barriers.emplace_back(image_memory_barrier).image =
|
||||||
|
textures_[pending_texture_upload.texture_index].image;
|
||||||
|
}
|
||||||
|
dfn.vkCmdPipelineBarrier(
|
||||||
|
setup_command_buffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
|
||||||
|
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr,
|
||||||
|
uint32_t(image_memory_barriers.size()), image_memory_barriers.data());
|
||||||
|
|
||||||
|
// Do transfer operations and transition to
|
||||||
|
// VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.
|
||||||
|
for (size_t i = 0; i < texture_uploads_pending_count; ++i) {
|
||||||
|
const PendingTextureUpload& pending_texture_upload =
|
||||||
|
texture_uploads_pending_[i];
|
||||||
|
VkImage texture_upload_image =
|
||||||
|
textures_[pending_texture_upload.texture_index].image;
|
||||||
|
if (pending_texture_upload.buffer != VK_NULL_HANDLE) {
|
||||||
|
// Copying.
|
||||||
|
VkBufferImageCopy copy_region;
|
||||||
|
copy_region.bufferOffset = 0;
|
||||||
|
copy_region.bufferRowLength = pending_texture_upload.width;
|
||||||
|
copy_region.bufferImageHeight = pending_texture_upload.height;
|
||||||
|
copy_region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
|
||||||
|
copy_region.imageSubresource.mipLevel = 0;
|
||||||
|
copy_region.imageSubresource.baseArrayLayer = 0;
|
||||||
|
copy_region.imageSubresource.layerCount = 1;
|
||||||
|
copy_region.imageOffset.x = 0;
|
||||||
|
copy_region.imageOffset.y = 0;
|
||||||
|
copy_region.imageOffset.z = 0;
|
||||||
|
copy_region.imageExtent.width = pending_texture_upload.width;
|
||||||
|
copy_region.imageExtent.height = pending_texture_upload.height;
|
||||||
|
copy_region.imageExtent.depth = 1;
|
||||||
|
dfn.vkCmdCopyBufferToImage(
|
||||||
|
setup_command_buffer, pending_texture_upload.buffer,
|
||||||
|
texture_upload_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1,
|
||||||
|
©_region);
|
||||||
|
} else {
|
||||||
|
// Clearing (initializing the empty image).
|
||||||
|
VkClearColorValue white_clear_value;
|
||||||
|
white_clear_value.float32[0] = 1.0f;
|
||||||
|
white_clear_value.float32[1] = 1.0f;
|
||||||
|
white_clear_value.float32[2] = 1.0f;
|
||||||
|
white_clear_value.float32[3] = 1.0f;
|
||||||
|
dfn.vkCmdClearColorImage(setup_command_buffer, texture_upload_image,
|
||||||
|
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
||||||
|
&white_clear_value, 1,
|
||||||
|
&image_memory_barrier.subresourceRange);
|
||||||
|
}
|
||||||
|
|
||||||
|
VkImageMemoryBarrier& image_memory_barrier_current =
|
||||||
|
image_memory_barriers[i];
|
||||||
|
image_memory_barrier_current.srcAccessMask =
|
||||||
|
VK_ACCESS_TRANSFER_WRITE_BIT;
|
||||||
|
image_memory_barrier_current.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
|
||||||
|
image_memory_barrier_current.oldLayout =
|
||||||
|
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
|
||||||
|
image_memory_barrier_current.newLayout =
|
||||||
|
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
|
||||||
|
|
||||||
|
SubmittedTextureUpload& submitted_texture_upload =
|
||||||
|
texture_uploads_submitted_.emplace_back();
|
||||||
|
// Transfer the reference to the texture - need to keep it until the
|
||||||
|
// upload is completed.
|
||||||
|
submitted_texture_upload.texture_index =
|
||||||
|
pending_texture_upload.texture_index;
|
||||||
|
submitted_texture_upload.buffer = pending_texture_upload.buffer;
|
||||||
|
submitted_texture_upload.buffer_memory =
|
||||||
|
pending_texture_upload.buffer_memory;
|
||||||
|
submitted_texture_upload.submission_index = submission_current;
|
||||||
|
}
|
||||||
|
dfn.vkCmdPipelineBarrier(
|
||||||
|
setup_command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
|
||||||
|
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr,
|
||||||
|
uint32_t(image_memory_barriers.size()), image_memory_barriers.data());
|
||||||
|
|
||||||
|
texture_uploads_pending_.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
vertex_buffer_pool_->FlushWrites();
|
vertex_buffer_pool_->FlushWrites();
|
||||||
current_command_buffer_ = VK_NULL_HANDLE;
|
current_command_buffer_ = VK_NULL_HANDLE;
|
||||||
}
|
}
|
||||||
|
@@ -460,6 +687,447 @@ bool VulkanImmediateDrawer::EnsurePipelinesCreated() {
   return true;
 }
 
+uint32_t VulkanImmediateDrawer::AllocateTextureDescriptor() {
+  // Try to reuse a recycled descriptor first.
+  if (texture_descriptor_pool_recycled_first_) {
+    TextureDescriptorPool* pool = texture_descriptor_pool_recycled_first_;
+    assert_not_zero(pool->recycled_bits);
+    uint32_t local_index;
+    xe::bit_scan_forward(pool->recycled_bits, &local_index);
+    pool->recycled_bits &= ~(uint64_t(1) << local_index);
+    if (!pool->recycled_bits) {
+      texture_descriptor_pool_recycled_first_ = pool->recycled_next;
+    }
+    return (pool->index << 6) | local_index;
+  }
+
+  const VulkanProvider& provider = context_.GetVulkanProvider();
+  const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
+  VkDevice device = provider.device();
+
+  VkDescriptorSetAllocateInfo allocate_info;
+  allocate_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+  allocate_info.pNext = nullptr;
+  allocate_info.descriptorSetCount = 1;
+  allocate_info.pSetLayouts = &texture_descriptor_set_layout_;
+
+  // If no recycled, try to create a new allocation within an existing pool with
+  // unallocated descriptors left.
+  while (texture_descriptor_pool_unallocated_first_) {
+    TextureDescriptorPool* pool = texture_descriptor_pool_unallocated_first_;
+    assert_not_zero(pool->unallocated_count);
+    allocate_info.descriptorPool = pool->pool;
+    uint32_t local_index =
+        TextureDescriptorPool::kDescriptorCount - pool->unallocated_count;
+    VkResult allocate_result = dfn.vkAllocateDescriptorSets(
+        device, &allocate_info, &pool->sets[local_index]);
+    if (allocate_result == VK_SUCCESS) {
+      --pool->unallocated_count;
+    } else {
+      // Failed to allocate for some reason, don't try again for this pool.
+      pool->unallocated_count = 0;
+    }
+    if (!pool->unallocated_count) {
+      texture_descriptor_pool_unallocated_first_ = pool->unallocated_next;
+    }
+    if (allocate_result == VK_SUCCESS) {
+      return (pool->index << 6) | local_index;
+    }
+  }
+
+  // Create a new pool and allocate the descriptor from it.
+  VkDescriptorPoolSize descriptor_pool_size;
+  descriptor_pool_size.type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+  descriptor_pool_size.descriptorCount =
+      TextureDescriptorPool::kDescriptorCount;
+  VkDescriptorPoolCreateInfo descriptor_pool_create_info;
+  descriptor_pool_create_info.sType =
+      VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+  descriptor_pool_create_info.pNext = nullptr;
+  descriptor_pool_create_info.flags = 0;
+  descriptor_pool_create_info.maxSets = TextureDescriptorPool::kDescriptorCount;
+  descriptor_pool_create_info.poolSizeCount = 1;
+  descriptor_pool_create_info.pPoolSizes = &descriptor_pool_size;
+  VkDescriptorPool descriptor_pool;
+  if (dfn.vkCreateDescriptorPool(device, &descriptor_pool_create_info, nullptr,
+                                 &descriptor_pool) != VK_SUCCESS) {
+    XELOGE(
+        "Failed to create an immediate drawer Vulkan combined image sampler "
+        "descriptor pool with {} descriptors",
+        TextureDescriptorPool::kDescriptorCount);
+    return UINT32_MAX;
+  }
+  allocate_info.descriptorPool = descriptor_pool;
+  VkDescriptorSet descriptor_set;
+  if (dfn.vkAllocateDescriptorSets(device, &allocate_info, &descriptor_set) !=
+      VK_SUCCESS) {
+    XELOGE(
+        "Failed to allocate an immediate drawer Vulkan combined image sampler "
+        "descriptor");
+    dfn.vkDestroyDescriptorPool(device, descriptor_pool, nullptr);
+    return UINT32_MAX;
+  }
+  TextureDescriptorPool* new_pool = new TextureDescriptorPool;
+  new_pool->pool = descriptor_pool;
+  new_pool->sets[0] = descriptor_set;
+  uint32_t new_pool_index = uint32_t(texture_descriptor_pools_.size());
+  new_pool->index = new_pool_index;
+  new_pool->unallocated_count = TextureDescriptorPool::kDescriptorCount - 1;
+  new_pool->recycled_bits = 0;
+  new_pool->unallocated_next = texture_descriptor_pool_unallocated_first_;
+  texture_descriptor_pool_unallocated_first_ = new_pool;
+  new_pool->recycled_next = nullptr;
+  texture_descriptor_pools_.push_back(new_pool);
+  return new_pool_index << 6;
+}
+
+VkDescriptorSet VulkanImmediateDrawer::GetTextureDescriptor(
+    uint32_t descriptor_index) const {
+  uint32_t pool_index = descriptor_index >> 6;
+  assert_true(pool_index < texture_descriptor_pools_.size());
+  const TextureDescriptorPool* pool = texture_descriptor_pools_[pool_index];
+  uint32_t allocation_index = descriptor_index & 63;
+  assert_true(allocation_index < TextureDescriptorPool::kDescriptorCount -
+                                     pool->unallocated_count);
+  return pool->sets[allocation_index];
|
||||||
|
}
|
||||||
|
|
||||||
|
void VulkanImmediateDrawer::FreeTextureDescriptor(uint32_t descriptor_index) {
|
||||||
|
uint32_t pool_index = descriptor_index >> 6;
|
||||||
|
assert_true(pool_index < texture_descriptor_pools_.size());
|
||||||
|
TextureDescriptorPool* pool = texture_descriptor_pools_[pool_index];
|
||||||
|
uint32_t allocation_index = descriptor_index & 63;
|
||||||
|
assert_true(allocation_index < TextureDescriptorPool::kDescriptorCount -
|
||||||
|
pool->unallocated_count);
|
||||||
|
assert_zero(pool->recycled_bits & (uint64_t(1) << allocation_index));
|
||||||
|
if (!pool->recycled_bits) {
|
||||||
|
// Add to the free list if not already in it.
|
||||||
|
pool->recycled_next = texture_descriptor_pool_recycled_first_;
|
||||||
|
texture_descriptor_pool_recycled_first_ = pool;
|
||||||
|
}
|
||||||
|
pool->recycled_bits |= uint64_t(1) << allocation_index;
|
||||||
|
}
|
||||||
|
|
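
// Illustrative standalone sketch, not part of the change above: how the packed
// texture descriptor index is intended to be decoded, assuming
// TextureDescriptorPool::kDescriptorCount stays at 64, i.e. 6 bits for the
// slot within a pool. All names in this sketch are hypothetical.
#include <cstdint>

struct UnpackedTextureDescriptorIndex {
  uint32_t pool;  // Index into texture_descriptor_pools_.
  uint32_t slot;  // Entry within TextureDescriptorPool::sets.
};

constexpr UnpackedTextureDescriptorIndex UnpackTextureDescriptorIndex(
    uint32_t descriptor_index) {
  // Mirrors GetTextureDescriptor and FreeTextureDescriptor above.
  return {descriptor_index >> 6, descriptor_index & 63u};
}

// For example, the first set allocated from the second pool is packed as
// (1 << 6) | 0 == 64 and decodes back to pool 1, slot 0.
static_assert(UnpackTextureDescriptorIndex(64).pool == 1 &&
                  UnpackTextureDescriptorIndex(64).slot == 0,
              "descriptor index packing");
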
size_t VulkanImmediateDrawer::CreateVulkanTexture(uint32_t width,
                                                  uint32_t height,
                                                  ImmediateTextureFilter filter,
                                                  bool is_repeated,
                                                  const uint8_t* data) {
  const VulkanProvider& provider = context_.GetVulkanProvider();
  const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
  VkDevice device = provider.device();
  bool dedicated_allocation_supported =
      provider.device_extensions().khr_dedicated_allocation;

  // Create the image and the descriptor.

  VkImageCreateInfo image_create_info;
  image_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  image_create_info.pNext = nullptr;
  image_create_info.flags = 0;
  image_create_info.imageType = VK_IMAGE_TYPE_2D;
  image_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
  image_create_info.extent.width = width;
  image_create_info.extent.height = height;
  image_create_info.extent.depth = 1;
  image_create_info.mipLevels = 1;
  image_create_info.arrayLayers = 1;
  image_create_info.samples = VK_SAMPLE_COUNT_1_BIT;
  image_create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
  image_create_info.usage =
      VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
  image_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  image_create_info.queueFamilyIndexCount = 0;
  image_create_info.pQueueFamilyIndices = nullptr;
  image_create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  VkImage image;
  if (dfn.vkCreateImage(device, &image_create_info, nullptr, &image) !=
      VK_SUCCESS) {
    XELOGE(
        "Failed to create a Vulkan image for a {}x{} immediate drawer texture",
        width, height);
    return SIZE_MAX;
  }

  VkMemoryAllocateInfo image_memory_allocate_info;
  VkMemoryRequirements image_memory_requirements;
  dfn.vkGetImageMemoryRequirements(device, image, &image_memory_requirements);
  if (!xe::bit_scan_forward(image_memory_requirements.memoryTypeBits &
                                provider.memory_types_device_local(),
                            &image_memory_allocate_info.memoryTypeIndex)) {
    XELOGE(
        "Failed to get a device-local memory type for a {}x{} immediate "
        "drawer Vulkan image",
        width, height);
    dfn.vkDestroyImage(device, image, nullptr);
    return SIZE_MAX;
  }
  image_memory_allocate_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  VkMemoryDedicatedAllocateInfoKHR image_memory_dedicated_allocate_info;
  if (dedicated_allocation_supported) {
    image_memory_dedicated_allocate_info.sType =
        VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR;
    image_memory_dedicated_allocate_info.pNext = nullptr;
    image_memory_dedicated_allocate_info.image = image;
    image_memory_dedicated_allocate_info.buffer = VK_NULL_HANDLE;
    image_memory_allocate_info.pNext = &image_memory_dedicated_allocate_info;
  } else {
    image_memory_allocate_info.pNext = nullptr;
  }
  image_memory_allocate_info.allocationSize = image_memory_requirements.size;
  VkDeviceMemory image_memory;
  if (dfn.vkAllocateMemory(device, &image_memory_allocate_info, nullptr,
                           &image_memory) != VK_SUCCESS) {
    XELOGE(
        "Failed to allocate memory for a {}x{} immediate drawer Vulkan "
        "image",
        width, height);
    dfn.vkDestroyImage(device, image, nullptr);
    return SIZE_MAX;
  }
  if (dfn.vkBindImageMemory(device, image, image_memory, 0) != VK_SUCCESS) {
    XELOGE("Failed to bind memory to a {}x{} immediate drawer Vulkan image",
           width, height);
    dfn.vkDestroyImage(device, image, nullptr);
    dfn.vkFreeMemory(device, image_memory, nullptr);
    return SIZE_MAX;
  }

  VkImageViewCreateInfo image_view_create_info;
  image_view_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  image_view_create_info.pNext = nullptr;
  image_view_create_info.flags = 0;
  image_view_create_info.image = image;
  image_view_create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
  image_view_create_info.format = VK_FORMAT_R8G8B8A8_UNORM;
  // data == nullptr is a special case for (1, 1, 1, 1).
  VkComponentSwizzle swizzle =
      data ? VK_COMPONENT_SWIZZLE_IDENTITY : VK_COMPONENT_SWIZZLE_ONE;
  image_view_create_info.components.r = swizzle;
  image_view_create_info.components.g = swizzle;
  image_view_create_info.components.b = swizzle;
  image_view_create_info.components.a = swizzle;
  util::InitializeSubresourceRange(image_view_create_info.subresourceRange);
  VkImageView image_view;
  if (dfn.vkCreateImageView(device, &image_view_create_info, nullptr,
                            &image_view) != VK_SUCCESS) {
    XELOGE(
        "Failed to create an image view for a {}x{} immediate drawer Vulkan "
        "image",
        width, height);
    dfn.vkDestroyImage(device, image, nullptr);
    dfn.vkFreeMemory(device, image_memory, nullptr);
    return SIZE_MAX;
  }

  uint32_t descriptor_index = AllocateTextureDescriptor();
  if (descriptor_index == UINT32_MAX) {
    XELOGE(
        "Failed to allocate a Vulkan descriptor for a {}x{} immediate drawer "
        "texture",
        width, height);
    dfn.vkDestroyImageView(device, image_view, nullptr);
    dfn.vkDestroyImage(device, image, nullptr);
    dfn.vkFreeMemory(device, image_memory, nullptr);
    return SIZE_MAX;
  }
  VkDescriptorImageInfo descriptor_image_info;
  VulkanProvider::HostSampler host_sampler;
  if (filter == ImmediateTextureFilter::kLinear) {
    host_sampler = is_repeated ? VulkanProvider::HostSampler::kLinearRepeat
                               : VulkanProvider::HostSampler::kLinearClamp;
  } else {
    host_sampler = is_repeated ? VulkanProvider::HostSampler::kNearestRepeat
                               : VulkanProvider::HostSampler::kNearestClamp;
  }
  descriptor_image_info.sampler = provider.GetHostSampler(host_sampler);
  descriptor_image_info.imageView = image_view;
  descriptor_image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  VkWriteDescriptorSet descriptor_write;
  descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  descriptor_write.pNext = nullptr;
  descriptor_write.dstSet = GetTextureDescriptor(descriptor_index);
  descriptor_write.dstBinding = 0;
  descriptor_write.dstArrayElement = 0;
  descriptor_write.descriptorCount = 1;
  descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  descriptor_write.pImageInfo = &descriptor_image_info;
  descriptor_write.pBufferInfo = nullptr;
  descriptor_write.pTexelBufferView = nullptr;
  dfn.vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr);

  // Create and fill the upload buffer.

  // data == nullptr is a special case for (1, 1, 1, 1), clearing rather than
  // uploading in this case.
  VkBuffer upload_buffer = VK_NULL_HANDLE;
  VkDeviceMemory upload_buffer_memory = VK_NULL_HANDLE;
  if (data) {
    size_t data_size = sizeof(uint32_t) * width * height;
    VkBufferCreateInfo upload_buffer_create_info;
    upload_buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    upload_buffer_create_info.pNext = nullptr;
    upload_buffer_create_info.flags = 0;
    upload_buffer_create_info.size = VkDeviceSize(data_size);
    upload_buffer_create_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    upload_buffer_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    upload_buffer_create_info.queueFamilyIndexCount = 0;
    upload_buffer_create_info.pQueueFamilyIndices = nullptr;
    if (dfn.vkCreateBuffer(device, &upload_buffer_create_info, nullptr,
                           &upload_buffer) != VK_SUCCESS) {
      XELOGE(
          "Failed to create a Vulkan upload buffer for a {}x{} immediate "
          "drawer texture",
          width, height);
      FreeTextureDescriptor(descriptor_index);
      dfn.vkDestroyImageView(device, image_view, nullptr);
      dfn.vkDestroyImage(device, image, nullptr);
      dfn.vkFreeMemory(device, image_memory, nullptr);
      return SIZE_MAX;
    }

    VkMemoryAllocateInfo upload_buffer_memory_allocate_info;
    VkMemoryRequirements upload_buffer_memory_requirements;
    dfn.vkGetBufferMemoryRequirements(device, upload_buffer,
                                      &upload_buffer_memory_requirements);
    upload_buffer_memory_allocate_info.memoryTypeIndex =
        util::ChooseHostMemoryType(
            provider, upload_buffer_memory_requirements.memoryTypeBits, false);
    if (upload_buffer_memory_allocate_info.memoryTypeIndex == UINT32_MAX) {
      XELOGE(
          "Failed to get a host-visible memory type for a Vulkan upload buffer "
          "for a {}x{} immediate drawer texture",
          width, height);
      dfn.vkDestroyBuffer(device, upload_buffer, nullptr);
      FreeTextureDescriptor(descriptor_index);
      dfn.vkDestroyImageView(device, image_view, nullptr);
      dfn.vkDestroyImage(device, image, nullptr);
      dfn.vkFreeMemory(device, image_memory, nullptr);
      return SIZE_MAX;
    }
    upload_buffer_memory_allocate_info.sType =
        VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    VkMemoryDedicatedAllocateInfoKHR
        upload_buffer_memory_dedicated_allocate_info;
    if (dedicated_allocation_supported) {
      upload_buffer_memory_dedicated_allocate_info.sType =
          VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR;
      upload_buffer_memory_dedicated_allocate_info.pNext = nullptr;
      upload_buffer_memory_dedicated_allocate_info.image = VK_NULL_HANDLE;
      upload_buffer_memory_dedicated_allocate_info.buffer = upload_buffer;
      upload_buffer_memory_allocate_info.pNext =
          &upload_buffer_memory_dedicated_allocate_info;
    } else {
      upload_buffer_memory_allocate_info.pNext = nullptr;
    }
    upload_buffer_memory_allocate_info.allocationSize =
        util::GetMappableMemorySize(provider,
                                    upload_buffer_memory_requirements.size);
    if (dfn.vkAllocateMemory(device, &upload_buffer_memory_allocate_info,
                             nullptr, &upload_buffer_memory) != VK_SUCCESS) {
      XELOGE(
          "Failed to allocate memory for a Vulkan upload buffer for a {}x{} "
          "immediate drawer texture",
          width, height);
      dfn.vkDestroyBuffer(device, upload_buffer, nullptr);
      FreeTextureDescriptor(descriptor_index);
      dfn.vkDestroyImageView(device, image_view, nullptr);
      dfn.vkDestroyImage(device, image, nullptr);
      dfn.vkFreeMemory(device, image_memory, nullptr);
      return SIZE_MAX;
    }
    if (dfn.vkBindBufferMemory(device, upload_buffer, upload_buffer_memory,
                               0) != VK_SUCCESS) {
      XELOGE(
          "Failed to bind memory to a Vulkan upload buffer for a {}x{} "
          "immediate drawer texture",
          width, height);
      dfn.vkDestroyBuffer(device, upload_buffer, nullptr);
      dfn.vkFreeMemory(device, upload_buffer_memory, nullptr);
      FreeTextureDescriptor(descriptor_index);
      dfn.vkDestroyImageView(device, image_view, nullptr);
      dfn.vkDestroyImage(device, image, nullptr);
      dfn.vkFreeMemory(device, image_memory, nullptr);
      return SIZE_MAX;
    }

    void* upload_buffer_mapping;
    if (dfn.vkMapMemory(device, upload_buffer_memory, 0, VK_WHOLE_SIZE, 0,
                        &upload_buffer_mapping) != VK_SUCCESS) {
      XELOGE(
          "Failed to map Vulkan upload buffer memory for a {}x{} immediate "
          "drawer texture",
          width, height);
      dfn.vkDestroyBuffer(device, upload_buffer, nullptr);
      dfn.vkFreeMemory(device, upload_buffer_memory, nullptr);
      FreeTextureDescriptor(descriptor_index);
      dfn.vkDestroyImageView(device, image_view, nullptr);
      dfn.vkDestroyImage(device, image, nullptr);
      dfn.vkFreeMemory(device, image_memory, nullptr);
      return SIZE_MAX;
    }
    std::memcpy(upload_buffer_mapping, data, data_size);
    util::FlushMappedMemoryRange(
        provider, upload_buffer_memory,
        upload_buffer_memory_allocate_info.memoryTypeIndex);
    dfn.vkUnmapMemory(device, upload_buffer_memory);
  }

  size_t texture_index;
  if (!textures_free_.empty()) {
    texture_index = textures_free_.back();
    textures_free_.pop_back();
  } else {
    texture_index = textures_.size();
    textures_.emplace_back();
  }
  Texture& texture = textures_[texture_index];
  texture.immediate_texture = nullptr;
  texture.image = image;
  texture.memory = image_memory;
  texture.image_view = image_view;
  texture.descriptor_index = descriptor_index;
  // The reference that will be returned to the caller.
  texture.reference_count = 1;

  PendingTextureUpload& pending_texture_upload =
      texture_uploads_pending_.emplace_back();
  // While the upload has not yet been completed, keep a reference to the
  // texture because its lifetime is not tied to that of the ImmediateTexture
  // (and thus to context's submissions) now.
  ++texture.reference_count;
  pending_texture_upload.texture_index = texture_index;
  pending_texture_upload.width = width;
  pending_texture_upload.height = height;
  pending_texture_upload.buffer = upload_buffer;
  pending_texture_upload.buffer_memory = upload_buffer_memory;

  return texture_index;
}
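
// Worked example, not part of the function above: the upload size it computes
// assumes tightly packed 32-bit RGBA texels (VK_FORMAT_R8G8B8A8_UNORM with no
// row padding), so a hypothetical helper expressing the same math would be:
constexpr size_t ImmediateTextureUploadSize(uint32_t width, uint32_t height) {
  return sizeof(uint32_t) * width * height;
}
// For instance, a 256x64 RGBA font atlas needs 256 * 64 * 4 = 65536 bytes of
// staging memory.
static_assert(ImmediateTextureUploadSize(256, 64) == 65536,
              "tightly packed RGBA8 sizing");
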
void VulkanImmediateDrawer::ReleaseTexture(size_t index) {
  assert_true(index < textures_.size());
  Texture& texture = textures_[index];
  assert_not_zero(texture.reference_count);
  if (--texture.reference_count) {
    return;
  }
  // If the texture is attached to a VulkanImmediateTexture, the
  // VulkanImmediateTexture must hold a reference to it.
  assert_null(texture.immediate_texture);
  FreeTextureDescriptor(texture.descriptor_index);
  const VulkanProvider& provider = context_.GetVulkanProvider();
  const VulkanProvider::DeviceFunctions& dfn = provider.dfn();
  VkDevice device = provider.device();
  dfn.vkDestroyImageView(device, texture.image_view, nullptr);
  dfn.vkDestroyImage(device, texture.image, nullptr);
  dfn.vkFreeMemory(device, texture.memory, nullptr);
  textures_free_.push_back(index);
  // TODO(Triang3l): Track last usage submission because it turns out that
  // deletion in the ImGui and the profiler actually happens before awaiting
  // submission completion.
}

}  // namespace vulkan
}  // namespace ui
}  // namespace xe

@ -10,7 +10,11 @@
#ifndef XENIA_UI_VULKAN_VULKAN_IMMEDIATE_DRAWER_H_
#define XENIA_UI_VULKAN_VULKAN_IMMEDIATE_DRAWER_H_

#include <cstddef>
#include <deque>
#include <memory>
#include <unordered_set>
#include <vector>

#include "xenia/ui/immediate_drawer.h"
#include "xenia/ui/vulkan/vulkan_upload_buffer_pool.h"

@ -48,10 +52,116 @@ class VulkanImmediateDrawer : public ImmediateDrawer {
    } vertex;
  };

  class VulkanImmediateTexture : public ImmediateTexture {
   public:
    VulkanImmediateTexture(uint32_t width, uint32_t height,
                           VulkanImmediateDrawer* immediate_drawer,
                           uintptr_t immediate_drawer_handle)
        : ImmediateTexture(width, height),
          immediate_drawer_(immediate_drawer) {
      handle = immediate_drawer_handle;
    }
    ~VulkanImmediateTexture() {
      if (immediate_drawer_) {
        immediate_drawer_->HandleImmediateTextureDestroyed(handle);
      }
    }
    void DetachFromImmediateDrawer() {
      immediate_drawer_ = nullptr;
      handle = 0;
    }

   private:
    VulkanImmediateDrawer* immediate_drawer_;
  };

  struct TextureDescriptorPool {
    // Using uint64_t for recycled bits.
    static constexpr uint32_t kDescriptorCount = 64;
    VkDescriptorPool pool;
    VkDescriptorSet sets[kDescriptorCount];
    uint32_t index;
    uint32_t unallocated_count;
    uint64_t recycled_bits;
    TextureDescriptorPool* unallocated_next;
    TextureDescriptorPool* recycled_next;
  };

  // Tracked separately from VulkanImmediateTexture because copying may take
  // additional references.
  struct Texture {
    // Null for the white texture, reference held by the drawer itself instead
    // of immediate textures.
    VulkanImmediateTexture* immediate_texture;
    VkImage image;
    VkDeviceMemory memory;
    VkImageView image_view;
    uint32_t descriptor_index;
    uint32_t reference_count;
  };

  bool EnsurePipelinesCreated();

  // Allocates a combined image sampler in a pool and returns its index, or
  // UINT32_MAX in case of failure.
  uint32_t AllocateTextureDescriptor();
  VkDescriptorSet GetTextureDescriptor(uint32_t descriptor_index) const;
  void FreeTextureDescriptor(uint32_t descriptor_index);

  // Returns SIZE_MAX in case of failure. The created texture will have a
  // reference count of 1 plus references needed for uploading, but will not be
  // attached to a VulkanImmediateTexture (will return the reference to the
  // caller, in short). If data is null, a (1, 1, 1, 1) image will be created,
  // which can be used as a replacement when drawing without a real texture.
  size_t CreateVulkanTexture(uint32_t width, uint32_t height,
                             ImmediateTextureFilter filter, bool is_repeated,
                             const uint8_t* data);
  void ReleaseTexture(size_t index);
  uintptr_t GetTextureHandleForIndex(size_t index) const {
    return index != white_texture_index_ ? uintptr_t(index + 1) : 0;
  }
  size_t GetTextureIndexForHandle(uintptr_t handle) const {
    // 0 is a special value for no texture.
    return handle ? size_t(handle - 1) : white_texture_index_;
  }
  // For calling from VulkanImmediateTexture.
  void HandleImmediateTextureDestroyed(uintptr_t handle) {
    size_t index = GetTextureIndexForHandle(handle);
    if (index == white_texture_index_) {
      return;
    }
    textures_[index].immediate_texture = nullptr;
    ReleaseTexture(index);
  }
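
  // Worked example of the handle <-> index mapping above (values illustrative,
  // assuming white_texture_index_ == 0):
  //   GetTextureHandleForIndex(white_texture_index_) -> 0 (the "no texture"
  //       handle, drawn with the white texture)
  //   GetTextureHandleForIndex(5)  -> 6
  //   GetTextureIndexForHandle(6)  -> 5
  //   GetTextureIndexForHandle(0)  -> white_texture_index_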

  VulkanContext& context_;

  // Combined image sampler pools for textures.
  VkDescriptorSetLayout texture_descriptor_set_layout_;
  std::vector<TextureDescriptorPool*> texture_descriptor_pools_;
  TextureDescriptorPool* texture_descriptor_pool_unallocated_first_ = nullptr;
  TextureDescriptorPool* texture_descriptor_pool_recycled_first_ = nullptr;

  std::vector<Texture> textures_;
  std::vector<size_t> textures_free_;
  struct PendingTextureUpload {
    size_t texture_index;
    uint32_t width;
    uint32_t height;
    // VK_NULL_HANDLE if need to clear rather than to copy.
    VkBuffer buffer;
    VkDeviceMemory buffer_memory;
  };
  std::vector<PendingTextureUpload> texture_uploads_pending_;
  struct SubmittedTextureUpload {
    size_t texture_index;
    // VK_NULL_HANDLE if cleared rather than copied.
    VkBuffer buffer;
    VkDeviceMemory buffer_memory;
    uint64_t submission_index;
  };
  std::deque<SubmittedTextureUpload> texture_uploads_submitted_;
  size_t white_texture_index_;

  VkPipelineLayout pipeline_layout_ = VK_NULL_HANDLE;

  std::unique_ptr<VulkanUploadBufferPool> vertex_buffer_pool_;

@ -64,6 +174,7 @@ class VulkanImmediateDrawer : public ImmediateDrawer {
  VkExtent2D current_render_target_extent_;
  VkRect2D current_scissor_;
  VkPipeline current_pipeline_;
  uint32_t current_texture_descriptor_index_;
  bool batch_open_ = false;
  bool batch_has_index_buffer_;
};

@ -9,6 +9,7 @@
#include "xenia/ui/vulkan/vulkan_provider.h"

#include <cfloat>
#include <cstring>
#include <vector>

@ -60,6 +61,12 @@ VulkanProvider::VulkanProvider(Window* main_window)
    : GraphicsProvider(main_window) {}

VulkanProvider::~VulkanProvider() {
  for (size_t i = 0; i < size_t(HostSampler::kCount); ++i) {
    if (host_samplers_[i] != VK_NULL_HANDLE) {
      dfn_.vkDestroySampler(device_, host_samplers_[i], nullptr);
    }
  }
  if (device_ != VK_NULL_HANDLE) {
    ifn_.vkDestroyDevice(device_, nullptr);
  }

@ -414,6 +421,7 @@ bool VulkanProvider::Initialize() {
  memory_types_device_local_ = 0;
  memory_types_host_visible_ = 0;
  memory_types_host_coherent_ = 0;
  memory_types_host_cached_ = 0;
  for (uint32_t j = 0; j < memory_properties.memoryTypeCount; ++j) {
    VkMemoryPropertyFlags memory_property_flags =
        memory_properties.memoryTypes[j].propertyFlags;

@ -427,6 +435,9 @@ bool VulkanProvider::Initialize() {
    if (memory_property_flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
      memory_types_host_coherent_ |= memory_type_bit;
    }
    if (memory_property_flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
      memory_types_host_cached_ |= memory_type_bit;
    }
  }
  if (!memory_types_device_local_ && !memory_types_host_visible_) {
    // Shouldn't happen according to the specification.

@ -516,38 +527,52 @@ bool VulkanProvider::Initialize() {
      nullptr;
  XE_VULKAN_LOAD_DFN(vkAcquireNextImageKHR);
  XE_VULKAN_LOAD_DFN(vkAllocateCommandBuffers);
  XE_VULKAN_LOAD_DFN(vkAllocateDescriptorSets);
  XE_VULKAN_LOAD_DFN(vkAllocateMemory);
  XE_VULKAN_LOAD_DFN(vkBeginCommandBuffer);
  XE_VULKAN_LOAD_DFN(vkBindBufferMemory);
  XE_VULKAN_LOAD_DFN(vkBindImageMemory);
  XE_VULKAN_LOAD_DFN(vkCmdBeginRenderPass);
  XE_VULKAN_LOAD_DFN(vkCmdBindDescriptorSets);
  XE_VULKAN_LOAD_DFN(vkCmdBindIndexBuffer);
  XE_VULKAN_LOAD_DFN(vkCmdBindPipeline);
  XE_VULKAN_LOAD_DFN(vkCmdBindVertexBuffers);
  XE_VULKAN_LOAD_DFN(vkCmdClearColorImage);
  XE_VULKAN_LOAD_DFN(vkCmdCopyBufferToImage);
  XE_VULKAN_LOAD_DFN(vkCmdDraw);
  XE_VULKAN_LOAD_DFN(vkCmdDrawIndexed);
  XE_VULKAN_LOAD_DFN(vkCmdEndRenderPass);
  XE_VULKAN_LOAD_DFN(vkCmdPipelineBarrier);
  XE_VULKAN_LOAD_DFN(vkCmdPushConstants);
  XE_VULKAN_LOAD_DFN(vkCmdSetScissor);
  XE_VULKAN_LOAD_DFN(vkCmdSetViewport);
  XE_VULKAN_LOAD_DFN(vkCreateBuffer);
  XE_VULKAN_LOAD_DFN(vkCreateCommandPool);
  XE_VULKAN_LOAD_DFN(vkCreateDescriptorPool);
  XE_VULKAN_LOAD_DFN(vkCreateDescriptorSetLayout);
  XE_VULKAN_LOAD_DFN(vkCreateFence);
  XE_VULKAN_LOAD_DFN(vkCreateFramebuffer);
  XE_VULKAN_LOAD_DFN(vkCreateGraphicsPipelines);
  XE_VULKAN_LOAD_DFN(vkCreateImage);
  XE_VULKAN_LOAD_DFN(vkCreateImageView);
  XE_VULKAN_LOAD_DFN(vkCreatePipelineLayout);
  XE_VULKAN_LOAD_DFN(vkCreateRenderPass);
  XE_VULKAN_LOAD_DFN(vkCreateSampler);
  XE_VULKAN_LOAD_DFN(vkCreateSemaphore);
  XE_VULKAN_LOAD_DFN(vkCreateShaderModule);
  XE_VULKAN_LOAD_DFN(vkCreateSwapchainKHR);
  XE_VULKAN_LOAD_DFN(vkDestroyBuffer);
  XE_VULKAN_LOAD_DFN(vkDestroyCommandPool);
  XE_VULKAN_LOAD_DFN(vkDestroyDescriptorPool);
  XE_VULKAN_LOAD_DFN(vkDestroyDescriptorSetLayout);
  XE_VULKAN_LOAD_DFN(vkDestroyFence);
  XE_VULKAN_LOAD_DFN(vkDestroyFramebuffer);
  XE_VULKAN_LOAD_DFN(vkDestroyImage);
  XE_VULKAN_LOAD_DFN(vkDestroyImageView);
  XE_VULKAN_LOAD_DFN(vkDestroyPipeline);
  XE_VULKAN_LOAD_DFN(vkDestroyPipelineLayout);
  XE_VULKAN_LOAD_DFN(vkDestroyRenderPass);
  XE_VULKAN_LOAD_DFN(vkDestroySampler);
  XE_VULKAN_LOAD_DFN(vkDestroySemaphore);
  XE_VULKAN_LOAD_DFN(vkDestroyShaderModule);
  XE_VULKAN_LOAD_DFN(vkDestroySwapchainKHR);

@ -556,12 +581,15 @@ bool VulkanProvider::Initialize() {
  XE_VULKAN_LOAD_DFN(vkFreeMemory);
  XE_VULKAN_LOAD_DFN(vkGetBufferMemoryRequirements);
  XE_VULKAN_LOAD_DFN(vkGetDeviceQueue);
  XE_VULKAN_LOAD_DFN(vkGetImageMemoryRequirements);
  XE_VULKAN_LOAD_DFN(vkGetSwapchainImagesKHR);
  XE_VULKAN_LOAD_DFN(vkMapMemory);
  XE_VULKAN_LOAD_DFN(vkResetCommandPool);
  XE_VULKAN_LOAD_DFN(vkResetFences);
  XE_VULKAN_LOAD_DFN(vkQueuePresentKHR);
  XE_VULKAN_LOAD_DFN(vkQueueSubmit);
  XE_VULKAN_LOAD_DFN(vkUnmapMemory);
  XE_VULKAN_LOAD_DFN(vkUpdateDescriptorSets);
  XE_VULKAN_LOAD_DFN(vkWaitForFences);
#undef XE_VULKAN_LOAD_DFN
  if (!device_functions_loaded) {

@ -583,10 +611,54 @@ bool VulkanProvider::Initialize() {
    queue_sparse_binding_ = VK_NULL_HANDLE;
  }

  // Create host-side samplers.
  VkSamplerCreateInfo sampler_create_info = {};
  sampler_create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  sampler_create_info.magFilter = VK_FILTER_NEAREST;
  sampler_create_info.minFilter = VK_FILTER_NEAREST;
  sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
  sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  sampler_create_info.maxLod = FLT_MAX;
  if (dfn_.vkCreateSampler(
          device_, &sampler_create_info, nullptr,
          &host_samplers_[size_t(HostSampler::kNearestClamp)]) != VK_SUCCESS) {
    XELOGE("Failed to create the nearest-neighbor clamping Vulkan sampler");
    return false;
  }
  sampler_create_info.magFilter = VK_FILTER_LINEAR;
  sampler_create_info.minFilter = VK_FILTER_LINEAR;
  sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
  if (dfn_.vkCreateSampler(
          device_, &sampler_create_info, nullptr,
          &host_samplers_[size_t(HostSampler::kLinearClamp)]) != VK_SUCCESS) {
    XELOGE("Failed to create the bilinear-filtering clamping Vulkan sampler");
    return false;
  }
  sampler_create_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  sampler_create_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  sampler_create_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  if (dfn_.vkCreateSampler(
          device_, &sampler_create_info, nullptr,
          &host_samplers_[size_t(HostSampler::kLinearRepeat)]) != VK_SUCCESS) {
    XELOGE("Failed to create the bilinear-filtering repeating Vulkan sampler");
    return false;
  }
  sampler_create_info.magFilter = VK_FILTER_NEAREST;
  sampler_create_info.minFilter = VK_FILTER_NEAREST;
  sampler_create_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
  if (dfn_.vkCreateSampler(
          device_, &sampler_create_info, nullptr,
          &host_samplers_[size_t(HostSampler::kNearestRepeat)]) != VK_SUCCESS) {
    XELOGE("Failed to create the nearest-neighbor repeating Vulkan sampler");
    return false;
  }

  return true;
}

-std::unique_ptr<GraphicsContext> VulkanProvider::CreateContext(
+std::unique_ptr<GraphicsContext> VulkanProvider::CreateHostContext(
    Window* target_window) {
  auto new_context =
      std::unique_ptr<VulkanContext>(new VulkanContext(this, target_window));

@ -596,7 +668,7 @@ std::unique_ptr<GraphicsContext> VulkanProvider::CreateContext(
  return std::unique_ptr<GraphicsContext>(new_context.release());
}

-std::unique_ptr<GraphicsContext> VulkanProvider::CreateOffscreenContext() {
+std::unique_ptr<GraphicsContext> VulkanProvider::CreateEmulationContext() {
  auto new_context =
      std::unique_ptr<VulkanContext>(new VulkanContext(this, nullptr));
  if (!new_context->Initialize()) {

@ -43,9 +43,9 @@ class VulkanProvider : public GraphicsProvider {
  static std::unique_ptr<VulkanProvider> Create(Window* main_window);

-  std::unique_ptr<GraphicsContext> CreateContext(
+  std::unique_ptr<GraphicsContext> CreateHostContext(
      Window* target_window) override;
-  std::unique_ptr<GraphicsContext> CreateOffscreenContext() override;
+  std::unique_ptr<GraphicsContext> CreateEmulationContext() override;

  struct LibraryFunctions {
    // From the module.

@ -113,6 +113,9 @@ class VulkanProvider : public GraphicsProvider {
  uint32_t memory_types_host_coherent() const {
    return memory_types_host_coherent_;
  }
  uint32_t memory_types_host_cached() const {
    return memory_types_host_cached_;
  }
  // FIXME(Triang3l): Allow a separate queue for present - see
  // vulkan_provider.cc for details.
  uint32_t queue_family_graphics_compute() const {

@ -123,38 +126,52 @@ class VulkanProvider : public GraphicsProvider {
  struct DeviceFunctions {
    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers;
    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkBeginCommandBuffer vkBeginCommandBuffer;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass;
    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets;
    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer;
    PFN_vkCmdBindPipeline vkCmdBindPipeline;
    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers;
    PFN_vkCmdClearColorImage vkCmdClearColorImage;
    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage;
    PFN_vkCmdDraw vkCmdDraw;
    PFN_vkCmdDrawIndexed vkCmdDrawIndexed;
    PFN_vkCmdEndRenderPass vkCmdEndRenderPass;
    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier;
    PFN_vkCmdPushConstants vkCmdPushConstants;
    PFN_vkCmdSetScissor vkCmdSetScissor;
    PFN_vkCmdSetViewport vkCmdSetViewport;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkCreateCommandPool vkCreateCommandPool;
    PFN_vkCreateDescriptorPool vkCreateDescriptorPool;
    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout;
    PFN_vkCreateFence vkCreateFence;
    PFN_vkCreateFramebuffer vkCreateFramebuffer;
    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkCreateImageView vkCreateImageView;
    PFN_vkCreatePipelineLayout vkCreatePipelineLayout;
    PFN_vkCreateRenderPass vkCreateRenderPass;
    PFN_vkCreateSampler vkCreateSampler;
    PFN_vkCreateSemaphore vkCreateSemaphore;
    PFN_vkCreateShaderModule vkCreateShaderModule;
    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkDestroyCommandPool vkDestroyCommandPool;
    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool;
    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout;
    PFN_vkDestroyFence vkDestroyFence;
    PFN_vkDestroyFramebuffer vkDestroyFramebuffer;
    PFN_vkDestroyImage vkDestroyImage;
    PFN_vkDestroyImageView vkDestroyImageView;
    PFN_vkDestroyPipeline vkDestroyPipeline;
    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout;
    PFN_vkDestroyRenderPass vkDestroyRenderPass;
    PFN_vkDestroySampler vkDestroySampler;
    PFN_vkDestroySemaphore vkDestroySemaphore;
    PFN_vkDestroyShaderModule vkDestroyShaderModule;
    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR;

@ -163,12 +180,15 @@ class VulkanProvider : public GraphicsProvider {
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetDeviceQueue vkGetDeviceQueue;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkResetCommandPool vkResetCommandPool;
    PFN_vkResetFences vkResetFences;
    PFN_vkQueuePresentKHR vkQueuePresentKHR;
    PFN_vkQueueSubmit vkQueueSubmit;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets;
    PFN_vkWaitForFences vkWaitForFences;
  };
  const DeviceFunctions& dfn() const { return dfn_; }

@ -177,6 +197,22 @@ class VulkanProvider : public GraphicsProvider {
  // May be VK_NULL_HANDLE if not available.
  VkQueue queue_sparse_binding() const { return queue_sparse_binding_; }

  // Samplers that may be useful for host needs. Only these samplers should be
  // used in host, non-emulation contexts, because the total number of samplers
  // is heavily limited (4000) on Nvidia GPUs - the rest of the samplers are
  // allocated for emulation.
  enum class HostSampler {
    kNearestClamp,
    kLinearClamp,
    kNearestRepeat,
    kLinearRepeat,

    kCount,
  };
  VkSampler GetHostSampler(HostSampler sampler) const {
    return host_samplers_[size_t(sampler)];
  }

 private:
  explicit VulkanProvider(Window* main_window);

@ -200,6 +236,7 @@ class VulkanProvider : public GraphicsProvider {
  uint32_t memory_types_device_local_;
  uint32_t memory_types_host_visible_;
  uint32_t memory_types_host_coherent_;
  uint32_t memory_types_host_cached_;
  uint32_t queue_family_graphics_compute_;

  VkDevice device_ = VK_NULL_HANDLE;

@ -207,6 +244,8 @@ class VulkanProvider : public GraphicsProvider {
  VkQueue queue_graphics_compute_;
  // May be VK_NULL_HANDLE if not available.
  VkQueue queue_sparse_binding_;

  VkSampler host_samplers_[size_t(HostSampler::kCount)] = {};
};

}  // namespace vulkan
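
// Hypothetical usage sketch (not part of the header above) of the host sampler
// table: filling a combined image sampler descriptor the way the immediate
// drawer does. "provider" stands for an initialized VulkanProvider; the
// function name is illustrative only.
inline VkDescriptorImageInfo MakeLinearClampSamplerImageInfo(
    const xe::ui::vulkan::VulkanProvider& provider, VkImageView image_view) {
  VkDescriptorImageInfo image_info;
  image_info.sampler = provider.GetHostSampler(
      xe::ui::vulkan::VulkanProvider::HostSampler::kLinearClamp);
  image_info.imageView = image_view;
  image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  return image_info;
}
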
@ -13,24 +13,21 @@
#include "xenia/base/logging.h"
#include "xenia/base/math.h"
#include "xenia/ui/vulkan/vulkan_util.h"

namespace xe {
namespace ui {
namespace vulkan {

// Memory mappings are always aligned to nonCoherentAtomSize, so for simplicity,
// round the page size to it now via GetMappableMemorySize.
VulkanUploadBufferPool::VulkanUploadBufferPool(const VulkanProvider& provider,
                                               VkBufferUsageFlags usage,
                                               size_t page_size)
-    : GraphicsUploadBufferPool(page_size), provider_(provider), usage_(usage) {
-  VkDeviceSize non_coherent_atom_size =
-      provider_.device_properties().limits.nonCoherentAtomSize;
-  // Memory mappings are always aligned to nonCoherentAtomSize, so for
-  // simplicity, round the page size to it now. On some Android implementations,
-  // nonCoherentAtomSize is 0, not 1.
-  if (non_coherent_atom_size > 1) {
-    page_size_ = xe::round_up(page_size_, non_coherent_atom_size);
-  }
-}
+    : GraphicsUploadBufferPool(size_t(
+          util::GetMappableMemorySize(provider, VkDeviceSize(page_size)))),
+      provider_(provider),
+      usage_(usage) {}

uint8_t* VulkanUploadBufferPool::Request(uint64_t submission_index, size_t size,
                                         size_t alignment, VkBuffer& buffer_out,

@ -96,10 +93,9 @@ VulkanUploadBufferPool::CreatePageImplementation() {
  if (memory_type_ == kMemoryTypeUnknown) {
    VkMemoryRequirements memory_requirements;
    dfn.vkGetBufferMemoryRequirements(device, buffer, &memory_requirements);
-    uint32_t memory_types_host_visible = provider_.memory_types_host_visible();
-    if (!xe::bit_scan_forward(
-            memory_requirements.memoryTypeBits & memory_types_host_visible,
-            &memory_type_)) {
+    memory_type_ = util::ChooseHostMemoryType(
+        provider_, memory_requirements.memoryTypeBits, false);
+    if (memory_type_ == UINT32_MAX) {
      XELOGE(
          "No host-visible memory types can store a Vulkan upload buffer with "
          "{} bytes",

@ -125,11 +121,10 @@ VulkanUploadBufferPool::CreatePageImplementation() {
    VkMemoryRequirements memory_requirements_expanded;
    dfn.vkGetBufferMemoryRequirements(device, buffer_expanded,
                                      &memory_requirements_expanded);
-    uint32_t memory_type_expanded;
+    uint32_t memory_type_expanded = util::ChooseHostMemoryType(
+        provider_, memory_requirements.memoryTypeBits, false);
    if (memory_requirements_expanded.size <= allocation_size_ &&
-        xe::bit_scan_forward(memory_requirements_expanded.memoryTypeBits &
-                                 memory_types_host_visible,
-                             &memory_type_expanded)) {
+        memory_type_expanded != UINT32_MAX) {
      // page_size_ must be aligned to nonCoherentAtomSize.
      page_size_ = size_t(allocation_size_aligned);
      allocation_size_ = memory_requirements_expanded.size;

@ -190,28 +185,9 @@ VulkanUploadBufferPool::CreatePageImplementation() {
void VulkanUploadBufferPool::FlushPageWrites(Page* page, size_t offset,
                                             size_t size) {
-  if (provider_.memory_types_host_coherent() & (uint32_t(1) << memory_type_)) {
-    return;
-  }
-  const VulkanProvider::DeviceFunctions& dfn = provider_.dfn();
-  VkDevice device = provider_.device();
-  VkMappedMemoryRange range;
-  range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
-  range.pNext = nullptr;
-  range.memory = static_cast<const VulkanPage*>(page)->memory_;
-  range.offset = VkDeviceSize(offset);
-  range.size = VkDeviceSize(size);
-  VkDeviceSize non_coherent_atom_size =
-      provider_.device_properties().limits.nonCoherentAtomSize;
-  // On some Android implementations, nonCoherentAtomSize is 0, not 1.
-  if (non_coherent_atom_size > 1) {
-    VkDeviceSize end =
-        xe::round_up(range.offset + range.size, non_coherent_atom_size);
-    range.offset =
-        range.offset / non_coherent_atom_size * non_coherent_atom_size;
-    range.size = end - range.offset;
-  }
-  dfn.vkFlushMappedMemoryRanges(device, 1, &range);
+  util::FlushMappedMemoryRange(
+      provider_, static_cast<const VulkanPage*>(page)->memory_, memory_type_,
+      VkDeviceSize(offset), VkDeviceSize(size));
}

VulkanUploadBufferPool::VulkanPage::~VulkanPage() {

@ -0,0 +1,49 @@
/**
 ******************************************************************************
 * Xenia : Xbox 360 Emulator Research Project                                 *
 ******************************************************************************
 * Copyright 2020 Ben Vanik. All rights reserved.                             *
 * Released under the BSD license - see LICENSE in the root for more details. *
 ******************************************************************************
 */

#include "xenia/ui/vulkan/vulkan_util.h"

#include "xenia/base/math.h"
#include "xenia/ui/vulkan/vulkan_provider.h"

namespace xe {
namespace ui {
namespace vulkan {
namespace util {

void FlushMappedMemoryRange(const VulkanProvider& provider,
                            VkDeviceMemory memory, uint32_t memory_type,
                            VkDeviceSize offset, VkDeviceSize size) {
  if (!size ||
      (provider.memory_types_host_coherent() & (uint32_t(1) << memory_type))) {
    return;
  }
  VkMappedMemoryRange range;
  range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
  range.pNext = nullptr;
  range.memory = memory;
  range.offset = offset;
  range.size = size;
  VkDeviceSize non_coherent_atom_size =
      provider.device_properties().limits.nonCoherentAtomSize;
  // On some Android implementations, nonCoherentAtomSize is 0, not 1.
  if (non_coherent_atom_size > 1) {
    range.offset = offset / non_coherent_atom_size * non_coherent_atom_size;
    if (size != VK_WHOLE_SIZE) {
      range.size =
          xe::round_up(offset + size, non_coherent_atom_size) - range.offset;
    }
  }
  provider.dfn().vkFlushMappedMemoryRanges(provider.device(), 1, &range);
}
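
// Worked example of the nonCoherentAtomSize expansion above, with illustrative
// numbers only: for an atom size of 64, a flush requested for offset 100 and
// size 60 is widened to offset 64 and size 128, so the flushed span [64, 192)
// fully covers the dirty span [100, 160):
//   range.offset = 100 / 64 * 64               = 64
//   range.size   = round_up(100 + 60, 64) - 64 = 192 - 64 = 128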

}  // namespace util
}  // namespace vulkan
}  // namespace ui
}  // namespace xe

@ -10,6 +10,7 @@
#ifndef XENIA_UI_VULKAN_VULKAN_UTIL_H_
#define XENIA_UI_VULKAN_VULKAN_UTIL_H_

#include "xenia/base/math.h"
#include "xenia/ui/vulkan/vulkan_provider.h"

namespace xe {

@ -37,6 +38,53 @@ inline bool DestroyAndNullHandle(F* destroy_function, P parent, T& handle) {
  return false;
}

inline VkDeviceSize GetMappableMemorySize(const VulkanProvider& provider,
                                          VkDeviceSize size) {
  VkDeviceSize non_coherent_atom_size =
      provider.device_properties().limits.nonCoherentAtomSize;
  // On some Android implementations, nonCoherentAtomSize is 0, not 1.
  if (non_coherent_atom_size > 1) {
    size = xe::round_up(size, non_coherent_atom_size, false);
  }
  return size;
}

inline uint32_t ChooseHostMemoryType(const VulkanProvider& provider,
                                     uint32_t supported_types,
                                     bool is_readback) {
  supported_types &= provider.memory_types_host_visible();
  uint32_t host_cached = provider.memory_types_host_cached();
  uint32_t memory_type;
  // For upload, uncached is preferred so writes do not pollute the CPU cache.
  // For readback, cached is preferred so multiple CPU reads are fast.
  // If the preferred caching behavior is not available, pick any host-visible.
  if (xe::bit_scan_forward(
          supported_types & (is_readback ? host_cached : ~host_cached),
          &memory_type) ||
      xe::bit_scan_forward(supported_types, &memory_type)) {
    return memory_type;
  }
  return UINT32_MAX;
}
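
// Hypothetical usage sketch of ChooseHostMemoryType (the helper name below is
// illustrative, not part of this header): upload-style buffers that the CPU
// only writes pass is_readback = false, which prefers uncached host-visible
// types; readback buffers that the CPU reads pass true, which prefers
// host-cached types. UINT32_MAX means no compatible host-visible type exists.
inline uint32_t ChooseUploadMemoryType(const VulkanProvider& provider,
                                       const VkMemoryRequirements& reqs) {
  return ChooseHostMemoryType(provider, reqs.memoryTypeBits, false);
}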

void FlushMappedMemoryRange(const VulkanProvider& provider,
                            VkDeviceMemory memory, uint32_t memory_type,
                            VkDeviceSize offset = 0,
                            VkDeviceSize size = VK_WHOLE_SIZE);

inline void InitializeSubresourceRange(
    VkImageSubresourceRange& range,
    VkImageAspectFlags aspect_mask = VK_IMAGE_ASPECT_COLOR_BIT,
    uint32_t base_mip_level = 0, uint32_t level_count = VK_REMAINING_MIP_LEVELS,
    uint32_t base_array_layer = 0,
    uint32_t layer_count = VK_REMAINING_ARRAY_LAYERS) {
  range.aspectMask = aspect_mask;
  range.baseMipLevel = base_mip_level;
  range.levelCount = level_count;
  range.baseArrayLayer = base_array_layer;
  range.layerCount = layer_count;
}

inline VkShaderModule CreateShaderModule(const VulkanProvider& provider,
                                         const void* code, size_t code_size) {
  VkShaderModuleCreateInfo shader_module_create_info;

@ -73,7 +73,7 @@ int window_demo_main(const std::vector<std::string>& args) {
  // The window will finish initialization with the context (loading
  // resources, etc).
  graphics_provider = CreateDemoGraphicsProvider(window.get());
-  window->set_context(graphics_provider->CreateContext(window.get()));
+  window->set_context(graphics_provider->CreateHostContext(window.get()));

  // Setup the profiler display.
  GraphicsContextLock context_lock(window->context());