[CP-stable][Impeller] Maintain a global map of each context's currently active thread-local command pools (#170013)

This pull request was created by the [automatic cherry pick workflow](https://github.com/flutter/flutter/blob/main/docs/releases/Flutter-Cherrypick-Process.md#automatically-creates-a-cherry-pick-request).
Please fill in the form below, and a flutter domain expert will evaluate this cherry pick request.

### Issue Link:
What is the link to the issue this cherry-pick is addressing?

https://github.com/flutter/flutter/issues/169208

### Changelog Description:
Explain this cherry pick in one line that is accessible to most Flutter developers. See [best practices](https://github.com/flutter/flutter/blob/main/docs/releases/Hotfix-Documentation-Best-Practices.md) for examples

Fixes a memory leak in the Impeller Vulkan back end.

### Impact Description:
What is the impact (ex. visual jank on Samsung phones, app crash, cannot ship an iOS app)? Does it impact development (ex. flutter doctor crashes when Android Studio is installed), or the shipping production app (the app crashes on launch)

The memory usage of apps using Impeller/Vulkan will increase as frames are rendered. Memory consumption will grow until the Android activity enters the stopped state.

### Workaround:
Is there a workaround for this issue?

Disabling Impeller.

### Risk:
What is the risk level of this cherry-pick?

  - [x] Medium

### Test Coverage:
Are you confident that your fix is well-tested by automated tests?

  - [x] Yes

### Validation Steps:
What are the steps to validate that this fix works?

Start an app using Impeller/Vulkan that renders frames nonstop (for example, video playback). Leave it running for several minutes. Check memory metrics with a tool like `adb shell dumpsys meminfo` and verify that memory usage is stable.
This commit is contained in:
flutteractionsbot 2025-06-05 08:39:10 -07:00 committed by GitHub
parent 46a4c05ace
commit 31c4875c7a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 70 additions and 27 deletions

View File

@ -132,6 +132,11 @@ void GoldenPlaygroundTest::SetTypographerContext(
void GoldenPlaygroundTest::TearDown() { void GoldenPlaygroundTest::TearDown() {
ASSERT_FALSE(dlopen("/usr/local/lib/libMoltenVK.dylib", RTLD_NOLOAD)); ASSERT_FALSE(dlopen("/usr/local/lib/libMoltenVK.dylib", RTLD_NOLOAD));
auto context = GetContext();
if (context) {
context->DisposeThreadLocalCachedResources();
}
} }
namespace { namespace {
@ -274,6 +279,9 @@ RuntimeStage::Map GoldenPlaygroundTest::OpenAssetAsRuntimeStage(
} }
std::shared_ptr<Context> GoldenPlaygroundTest::GetContext() const { std::shared_ptr<Context> GoldenPlaygroundTest::GetContext() const {
if (!pimpl_->screenshotter) {
return nullptr;
}
return pimpl_->screenshotter->GetPlayground().GetContext(); return pimpl_->screenshotter->GetPlayground().GetContext();
} }

View File

@ -171,10 +171,23 @@ static thread_local std::unique_ptr<CommandPoolMap> tls_command_pool_map;
// with that context. // with that context.
static Mutex g_all_pools_map_mutex; static Mutex g_all_pools_map_mutex;
static std::unordered_map< static std::unordered_map<
const ContextVK*, uint64_t,
std::vector<std::weak_ptr<CommandPoolVK>>> g_all_pools_map std::unordered_map<std::thread::id,
std::weak_ptr<CommandPoolVK>>> g_all_pools_map
IPLR_GUARDED_BY(g_all_pools_map_mutex); IPLR_GUARDED_BY(g_all_pools_map_mutex);
CommandPoolRecyclerVK::CommandPoolRecyclerVK(
const std::shared_ptr<ContextVK>& context)
: context_(context), context_hash_(context->GetHash()) {}
// Visible for testing.
// Returns the number of pools in g_all_pools_map for the given context.
int CommandPoolRecyclerVK::GetGlobalPoolCount(const ContextVK& context) {
Lock all_pools_lock(g_all_pools_map_mutex);
auto it = g_all_pools_map.find(context.GetHash());
return it != g_all_pools_map.end() ? it->second.size() : 0;
}
// TODO(matanlurey): Return a status_or<> instead of nullptr when we have one. // TODO(matanlurey): Return a status_or<> instead of nullptr when we have one.
std::shared_ptr<CommandPoolVK> CommandPoolRecyclerVK::Get() { std::shared_ptr<CommandPoolVK> CommandPoolRecyclerVK::Get() {
auto const strong_context = context_.lock(); auto const strong_context = context_.lock();
@ -187,8 +200,7 @@ std::shared_ptr<CommandPoolVK> CommandPoolRecyclerVK::Get() {
tls_command_pool_map.reset(new CommandPoolMap()); tls_command_pool_map.reset(new CommandPoolMap());
} }
CommandPoolMap& pool_map = *tls_command_pool_map.get(); CommandPoolMap& pool_map = *tls_command_pool_map.get();
auto const hash = strong_context->GetHash(); auto const it = pool_map.find(context_hash_);
auto const it = pool_map.find(hash);
if (it != pool_map.end()) { if (it != pool_map.end()) {
return it->second; return it->second;
} }
@ -201,11 +213,11 @@ std::shared_ptr<CommandPoolVK> CommandPoolRecyclerVK::Get() {
auto const resource = std::make_shared<CommandPoolVK>( auto const resource = std::make_shared<CommandPoolVK>(
std::move(data->pool), std::move(data->buffers), context_); std::move(data->pool), std::move(data->buffers), context_);
pool_map.emplace(hash, resource); pool_map.emplace(context_hash_, resource);
{ {
Lock all_pools_lock(g_all_pools_map_mutex); Lock all_pools_lock(g_all_pools_map_mutex);
g_all_pools_map[strong_context.get()].push_back(resource); g_all_pools_map[context_hash_][std::this_thread::get_id()] = resource;
} }
return resource; return resource;
@ -275,30 +287,33 @@ void CommandPoolRecyclerVK::Reclaim(
RecycledData{.pool = std::move(pool), .buffers = std::move(buffers)}); RecycledData{.pool = std::move(pool), .buffers = std::move(buffers)});
} }
CommandPoolRecyclerVK::~CommandPoolRecyclerVK() {
// Ensure all recycled pools are reclaimed before this is destroyed.
Dispose();
}
void CommandPoolRecyclerVK::Dispose() { void CommandPoolRecyclerVK::Dispose() {
CommandPoolMap* pool_map = tls_command_pool_map.get(); CommandPoolMap* pool_map = tls_command_pool_map.get();
if (pool_map) { if (pool_map) {
pool_map->clear(); pool_map->erase(context_hash_);
}
{
Lock all_pools_lock(g_all_pools_map_mutex);
auto found = g_all_pools_map.find(context_hash_);
if (found != g_all_pools_map.end()) {
found->second.erase(std::this_thread::get_id());
}
} }
} }
void CommandPoolRecyclerVK::DestroyThreadLocalPools(const ContextVK* context) { void CommandPoolRecyclerVK::DestroyThreadLocalPools() {
// Delete the context's entry in this thread's command pool map. // Delete the context's entry in this thread's command pool map.
if (tls_command_pool_map.get()) { if (tls_command_pool_map.get()) {
tls_command_pool_map.get()->erase(context->GetHash()); tls_command_pool_map.get()->erase(context_hash_);
} }
// Destroy all other thread-local CommandPoolVK instances associated with // Destroy all other thread-local CommandPoolVK instances associated with
// this context. // this context.
Lock all_pools_lock(g_all_pools_map_mutex); Lock all_pools_lock(g_all_pools_map_mutex);
auto found = g_all_pools_map.find(context); auto found = g_all_pools_map.find(context_hash_);
if (found != g_all_pools_map.end()) { if (found != g_all_pools_map.end()) {
for (auto& weak_pool : found->second) { for (auto& [thread_id, weak_pool] : found->second) {
auto pool = weak_pool.lock(); auto pool = weak_pool.lock();
if (!pool) { if (!pool) {
continue; continue;

View File

@ -103,8 +103,6 @@ class CommandPoolVK final {
class CommandPoolRecyclerVK final class CommandPoolRecyclerVK final
: public std::enable_shared_from_this<CommandPoolRecyclerVK> { : public std::enable_shared_from_this<CommandPoolRecyclerVK> {
public: public:
~CommandPoolRecyclerVK();
/// A unique command pool and zero or more recycled command buffers. /// A unique command pool and zero or more recycled command buffers.
struct RecycledData { struct RecycledData {
vk::UniqueCommandPool pool; vk::UniqueCommandPool pool;
@ -112,16 +110,13 @@ class CommandPoolRecyclerVK final
}; };
/// @brief Clean up resources held by all per-thread command pools /// @brief Clean up resources held by all per-thread command pools
/// associated with the given context. /// associated with the context.
/// void DestroyThreadLocalPools();
/// @param[in] context The context.
static void DestroyThreadLocalPools(const ContextVK* context);
/// @brief Creates a recycler for the given |ContextVK|. /// @brief Creates a recycler for the given |ContextVK|.
/// ///
/// @param[in] context The context to create the recycler for. /// @param[in] context The context to create the recycler for.
explicit CommandPoolRecyclerVK(std::weak_ptr<ContextVK> context) explicit CommandPoolRecyclerVK(const std::shared_ptr<ContextVK>& context);
: context_(std::move(context)) {}
/// @brief Gets a command pool for the current thread. /// @brief Gets a command pool for the current thread.
/// ///
@ -137,11 +132,15 @@ class CommandPoolRecyclerVK final
std::vector<vk::UniqueCommandBuffer>&& buffers, std::vector<vk::UniqueCommandBuffer>&& buffers,
bool should_trim = false); bool should_trim = false);
/// @brief Clears all recycled command pools to let them be reclaimed. /// @brief Clears this context's thread-local command pool.
void Dispose(); void Dispose();
// Visible for testing.
static int GetGlobalPoolCount(const ContextVK& context);
private: private:
std::weak_ptr<ContextVK> context_; std::weak_ptr<ContextVK> context_;
uint64_t context_hash_;
Mutex recycled_mutex_; Mutex recycled_mutex_;
std::vector<RecycledData> recycled_ IPLR_GUARDED_BY(recycled_mutex_); std::vector<RecycledData> recycled_ IPLR_GUARDED_BY(recycled_mutex_);

View File

@ -228,5 +228,24 @@ TEST(CommandPoolRecyclerVKTest, ExtraCommandBufferAllocationsTriggerTrim) {
context->Shutdown(); context->Shutdown();
} }
TEST(CommandPoolRecyclerVKTest, RecyclerGlobalPoolMapSize) {
auto context = MockVulkanContextBuilder().Build();
auto const recycler = context->GetCommandPoolRecycler();
// The global pool list for this context should initially be empty.
EXPECT_EQ(CommandPoolRecyclerVK::GetGlobalPoolCount(*context), 0);
// Creating a pool for this thread should insert the pool into the global map.
auto pool = recycler->Get();
EXPECT_EQ(CommandPoolRecyclerVK::GetGlobalPoolCount(*context), 1);
// Disposing this thread's pool should remove it from the global map.
pool.reset();
recycler->Dispose();
EXPECT_EQ(CommandPoolRecyclerVK::GetGlobalPoolCount(*context), 0);
context->Shutdown();
}
} // namespace testing } // namespace testing
} // namespace impeller } // namespace impeller

View File

@ -134,7 +134,9 @@ ContextVK::~ContextVK() {
if (device_holder_ && device_holder_->device) { if (device_holder_ && device_holder_->device) {
[[maybe_unused]] auto result = device_holder_->device->waitIdle(); [[maybe_unused]] auto result = device_holder_->device->waitIdle();
} }
CommandPoolRecyclerVK::DestroyThreadLocalPools(this); if (command_pool_recycler_) {
command_pool_recycler_->DestroyThreadLocalPools();
}
} }
Context::BackendType ContextVK::GetBackendType() const { Context::BackendType ContextVK::GetBackendType() const {
@ -421,7 +423,7 @@ void ContextVK::Setup(Settings settings) {
} }
auto command_pool_recycler = auto command_pool_recycler =
std::make_shared<CommandPoolRecyclerVK>(weak_from_this()); std::make_shared<CommandPoolRecyclerVK>(shared_from_this());
if (!command_pool_recycler) { if (!command_pool_recycler) {
VALIDATION_LOG << "Could not create command pool recycler."; VALIDATION_LOG << "Could not create command pool recycler.";
return; return;