2017-02-16 15:58:40 +00:00
|
|
|
// Copyright 2016 Dolphin Emulator Project
|
2021-07-05 01:22:19 +00:00
|
|
|
// SPDX-License-Identifier: GPL-2.0-or-later
|
2017-02-16 15:58:40 +00:00
|
|
|
|
2021-12-10 02:22:16 +00:00
|
|
|
#include "Common/Config/Config.h"
|
|
|
|
|
2017-02-16 15:58:40 +00:00
|
|
|
#include <algorithm>
|
2020-12-05 17:24:41 +00:00
|
|
|
#include <atomic>
|
2017-02-16 15:58:40 +00:00
|
|
|
#include <list>
|
|
|
|
#include <map>
|
2020-10-28 08:51:34 +00:00
|
|
|
#include <mutex>
|
2019-07-30 14:40:52 +00:00
|
|
|
#include <shared_mutex>
|
2017-02-16 15:58:40 +00:00
|
|
|
|
|
|
|
namespace Config
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
using Layers = std::map<LayerType, std::shared_ptr<Layer>>;

// All currently registered configuration layers, keyed by layer type.
static Layers s_layers;
// Callbacks invoked whenever the configuration changes (unless suppressed
// by an active ConfigChangeCallbackGuard).
static std::list<ConfigChangedCallback> s_callbacks;
// Count of live ConfigChangeCallbackGuard instances; while non-zero,
// OnConfigChanged() skips the callbacks (the version still advances).
static u32 s_callback_guards = 0;
// Monotonically increasing version number, bumped on every config change
// so that cached getters can detect stale data.
static std::atomic<u64> s_config_version = 0;

// Guards s_layers: shared (read) lock for lookup/iteration, exclusive
// (write) lock for insertion/removal.
static std::shared_mutex s_layers_rw_lock;

using ReadLock = std::shared_lock<std::shared_mutex>;
using WriteLock = std::unique_lock<std::shared_mutex>;
|
|
|
|
|
2019-12-27 18:17:56 +00:00
|
|
|
static void AddLayerInternal(std::shared_ptr<Layer> layer)
|
2017-02-16 15:58:40 +00:00
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
{
|
|
|
|
WriteLock lock(s_layers_rw_lock);
|
|
|
|
|
|
|
|
const Config::LayerType layer_type = layer->GetLayer();
|
|
|
|
s_layers.insert_or_assign(layer_type, std::move(layer));
|
|
|
|
}
|
2020-12-05 17:24:41 +00:00
|
|
|
OnConfigChanged();
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void AddLayer(std::unique_ptr<ConfigLayerLoader> loader)
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
AddLayerInternal(std::make_shared<Layer>(std::move(loader)));
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
2019-07-30 14:40:52 +00:00
|
|
|
// Returns the layer registered for the given type, or an empty pointer
// if no such layer exists.
std::shared_ptr<Layer> GetLayer(LayerType layer)
{
  ReadLock read_guard(s_layers_rw_lock);

  if (const auto found = s_layers.find(layer); found != s_layers.end())
    return found->second;
  return nullptr;
}
|
|
|
|
|
|
|
|
// Unregisters the layer of the given type (no-op if absent) and notifies
// listeners. The write lock is dropped before callbacks run.
void RemoveLayer(LayerType layer)
{
  {
    WriteLock write_guard(s_layers_rw_lock);

    s_layers.erase(layer);
  }
  OnConfigChanged();
}
|
|
|
|
|
|
|
|
// Registers a callback to be invoked on every config change.
void AddConfigChangedCallback(ConfigChangedCallback func)
{
  s_callbacks.push_back(std::move(func));
}
|
|
|
|
|
2020-12-05 17:24:41 +00:00
|
|
|
void OnConfigChanged()
|
2017-02-16 15:58:40 +00:00
|
|
|
{
|
2020-12-29 18:46:41 +00:00
|
|
|
// Increment the config version to invalidate caches.
|
|
|
|
// To ensure that getters do not return stale data, this should always be done
|
|
|
|
// even when callbacks are suppressed.
|
|
|
|
s_config_version.fetch_add(1, std::memory_order_relaxed);
|
|
|
|
|
2019-03-03 16:58:37 +00:00
|
|
|
if (s_callback_guards)
|
|
|
|
return;
|
|
|
|
|
2017-02-16 15:58:40 +00:00
|
|
|
for (const auto& callback : s_callbacks)
|
|
|
|
callback();
|
|
|
|
}
|
|
|
|
|
2020-12-05 17:24:41 +00:00
|
|
|
// Returns the current config version; changes whenever the config does,
// letting callers cheaply detect staleness.
u64 GetConfigVersion()
{
  const u64 version = s_config_version.load(std::memory_order_relaxed);
  return version;
}
|
|
|
|
|
2017-02-16 15:58:40 +00:00
|
|
|
// Explicit load and save of layers
|
|
|
|
void Load()
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
{
|
|
|
|
ReadLock lock(s_layers_rw_lock);
|
|
|
|
|
|
|
|
for (auto& layer : s_layers)
|
|
|
|
layer.second->Load();
|
|
|
|
}
|
2020-12-05 17:24:41 +00:00
|
|
|
OnConfigChanged();
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Save()
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
{
|
|
|
|
ReadLock lock(s_layers_rw_lock);
|
|
|
|
|
|
|
|
for (auto& layer : s_layers)
|
|
|
|
layer.second->Save();
|
|
|
|
}
|
2020-12-05 17:24:41 +00:00
|
|
|
OnConfigChanged();
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Init()
|
|
|
|
{
|
2017-05-13 22:34:49 +00:00
|
|
|
// These layers contain temporary values
|
|
|
|
ClearCurrentRunLayer();
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void Shutdown()
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
WriteLock lock(s_layers_rw_lock);
|
|
|
|
|
2017-02-16 15:58:40 +00:00
|
|
|
s_layers.clear();
|
|
|
|
s_callbacks.clear();
|
|
|
|
}
|
|
|
|
|
2017-05-13 22:34:49 +00:00
|
|
|
void ClearCurrentRunLayer()
|
|
|
|
{
|
2019-07-30 14:40:52 +00:00
|
|
|
WriteLock lock(s_layers_rw_lock);
|
|
|
|
|
|
|
|
s_layers.insert_or_assign(LayerType::CurrentRun, std::make_shared<Layer>(LayerType::CurrentRun));
|
2017-05-13 22:34:49 +00:00
|
|
|
}
|
|
|
|
|
2017-02-16 15:58:40 +00:00
|
|
|
// Maps each configuration system to its canonical name, as used for
// settings-file names/sections and by GetSystemName()/GetSystemFromName().
static const std::map<System, std::string> system_to_name = {
    {System::Main, "Dolphin"},
    {System::GCPad, "GCPad"},
    {System::WiiPad, "Wiimote"},
    {System::GCKeyboard, "GCKeyboard"},
    {System::GFX, "Graphics"},
    {System::Logger, "Logger"},
    {System::Debugger, "Debugger"},
    {System::SYSCONF, "SYSCONF"},
    {System::DualShockUDPClient, "DualShockUDPClient"},
    {System::FreeLook, "FreeLook"},
    {System::Session, "Session"}};
|
2017-02-16 15:58:40 +00:00
|
|
|
|
|
|
|
// Returns the canonical name for a system.
// Throws std::out_of_range if the system has no registered name.
const std::string& GetSystemName(System system)
{
  const std::string& name = system_to_name.at(system);
  return name;
}
|
|
|
|
|
2017-11-26 17:24:01 +00:00
|
|
|
std::optional<System> GetSystemFromName(const std::string& name)
|
2017-02-16 15:58:40 +00:00
|
|
|
{
|
|
|
|
const auto system = std::find_if(system_to_name.begin(), system_to_name.end(),
|
|
|
|
[&name](const auto& entry) { return entry.second == name; });
|
|
|
|
if (system != system_to_name.end())
|
|
|
|
return system->first;
|
|
|
|
|
2017-11-26 17:24:01 +00:00
|
|
|
return {};
|
2017-02-16 15:58:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Returns the human-readable name of a layer type.
// Throws std::out_of_range if the layer has no registered name.
const std::string& GetLayerName(LayerType layer)
{
  // std::map is key-ordered, so initializer order here is irrelevant.
  static const std::map<LayerType, std::string> layer_names = {
      {LayerType::Base, "Base"},
      {LayerType::CommandLine, "Command Line"},
      {LayerType::CurrentRun, "Current Run"},
      {LayerType::GlobalGame, "Global GameINI"},
      {LayerType::LocalGame, "Local GameINI"},
      {LayerType::Movie, "Movie"},
      {LayerType::Netplay, "Netplay"},
  };
  return layer_names.at(layer);
}
|
2017-07-09 23:17:36 +00:00
|
|
|
|
2020-05-02 12:39:40 +00:00
|
|
|
// Returns the highest-priority layer (per SEARCH_ORDER) that contains the
// given config location.
LayerType GetActiveLayerForConfig(const Location& config)
{
  ReadLock read_guard(s_layers_rw_lock);

  for (LayerType layer : SEARCH_ORDER)
  {
    const auto found = s_layers.find(layer);
    if (found != s_layers.end() && found->second->Exists(config))
      return layer;
  }

  // Not present in any layer: the base layer is considered active.
  return LayerType::Base;
}
|
2019-03-03 16:58:37 +00:00
|
|
|
|
2020-11-25 15:26:13 +00:00
|
|
|
std::optional<std::string> GetAsString(const Location& config)
|
|
|
|
{
|
|
|
|
std::optional<std::string> result;
|
|
|
|
ReadLock lock(s_layers_rw_lock);
|
|
|
|
|
|
|
|
for (auto layer : SEARCH_ORDER)
|
|
|
|
{
|
|
|
|
const auto it = s_layers.find(layer);
|
|
|
|
if (it != s_layers.end())
|
|
|
|
{
|
|
|
|
result = it->second->Get<std::string>(config);
|
|
|
|
if (result.has_value())
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2019-03-03 16:58:37 +00:00
|
|
|
// While at least one guard is alive, config-change callbacks are suppressed.
ConfigChangeCallbackGuard::ConfigChangeCallbackGuard()
{
  s_callback_guards += 1;
}
|
2019-03-03 16:58:37 +00:00
|
|
|
|
|
|
|
// When the last guard is destroyed, fire the (previously suppressed)
// change notification once.
ConfigChangeCallbackGuard::~ConfigChangeCallbackGuard()
{
  if (--s_callback_guards == 0)
    OnConfigChanged();
}
|
|
|
|
|
|
|
|
} // namespace Config
|