module_cache_unittest.cc

// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>
#include <iomanip>
#include <map>
#include <memory>
#include <utility>
#include <vector>

#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/profiler/module_cache.h"
#include "base/strings/string_piece.h"
#include "base/test/bind.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"

#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || BUILDFLAG(IS_ANDROID)
#include "base/debug/proc_maps_linux.h"
#endif

namespace base {
namespace {

int AFunctionForTest() {
  return 42;
}

// Provides a module that is guaranteed to be isolated from (and non-contiguous
// with) any other module, by placing the module in the middle of a block of
// heap memory.
class IsolatedModule : public ModuleCache::Module {
 public:
  explicit IsolatedModule(bool is_native = true)
      : is_native_(is_native), memory_region_(new char[kRegionSize]) {}

  // ModuleCache::Module
  uintptr_t GetBaseAddress() const override {
    // Place the module in the middle of the region.
    return reinterpret_cast<uintptr_t>(&memory_region_[kRegionSize / 4]);
  }

  std::string GetId() const override { return ""; }
  FilePath GetDebugBasename() const override { return FilePath(); }
  size_t GetSize() const override { return kRegionSize / 2; }
  bool IsNative() const override { return is_native_; }

 private:
  static const int kRegionSize = 100;

  bool is_native_;
  std::unique_ptr<char[]> memory_region_;
};
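
// With kRegionSize = 100, the module spans [kRegionSize / 4,
// kRegionSize / 4 + kRegionSize / 2) within the heap block, leaving
// kRegionSize / 4 bytes of padding on either side so the module cannot abut
// any other allocation.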

// Provides a fake module with configurable base address and size.
class FakeModule : public ModuleCache::Module {
 public:
  FakeModule(uintptr_t base_address,
             size_t size,
             bool is_native = true,
             OnceClosure destruction_closure = OnceClosure())
      : base_address_(base_address),
        size_(size),
        is_native_(is_native),
        destruction_closure_runner_(std::move(destruction_closure)) {}

  FakeModule(const FakeModule&) = delete;
  FakeModule& operator=(const FakeModule&) = delete;

  uintptr_t GetBaseAddress() const override { return base_address_; }
  std::string GetId() const override { return ""; }
  FilePath GetDebugBasename() const override { return FilePath(); }
  size_t GetSize() const override { return size_; }
  bool IsNative() const override { return is_native_; }

 private:
  uintptr_t base_address_;
  size_t size_;
  bool is_native_;
  ScopedClosureRunner destruction_closure_runner_;
};

// Utility function to add a single non-native module during test setup.
// Returns a pointer to the provided module.
const ModuleCache::Module* AddNonNativeModule(
    ModuleCache* cache,
    std::unique_ptr<const ModuleCache::Module> module) {
  const ModuleCache::Module* module_ptr = module.get();
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::move(module));
  cache->UpdateNonNativeModules({}, std::move(modules));
  return module_ptr;
}
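
// As used throughout these tests, the first argument to
// ModuleCache::UpdateNonNativeModules() lists previously added modules to
// remove and the second supplies new modules to add; passing an empty first
// argument, as above, performs a pure addition.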

#if (BUILDFLAG(IS_POSIX) && !BUILDFLAG(IS_IOS) && !defined(ARCH_CPU_ARM64)) || \
    (BUILDFLAG(IS_FUCHSIA) && !defined(ARCH_CPU_ARM64)) || BUILDFLAG(IS_WIN)
#define MAYBE_TEST(TestSuite, TestName) TEST(TestSuite, TestName)
#else
#define MAYBE_TEST(TestSuite, TestName) TEST(TestSuite, DISABLED_##TestName)
#endif
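
// For example, on a supported configuration MAYBE_TEST(ModuleCacheTest, Foo)
// expands to TEST(ModuleCacheTest, Foo); elsewhere it expands to
// TEST(ModuleCacheTest, DISABLED_Foo), so the test is still compiled but is
// skipped by gtest at run time.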

MAYBE_TEST(ModuleCacheTest, GetDebugBasename) {
  ModuleCache cache;
  const ModuleCache::Module* module =
      cache.GetModuleForAddress(reinterpret_cast<uintptr_t>(&AFunctionForTest));
  ASSERT_NE(nullptr, module);
#if BUILDFLAG(IS_ANDROID)
  EXPECT_EQ("libbase_unittests__library",
            // Different build configurations varyingly use .so vs. .cr.so for
            // the module extension. Remove all the extensions in both cases.
            module->GetDebugBasename()
                .RemoveFinalExtension()
                .RemoveFinalExtension()
                .value());
#elif BUILDFLAG(IS_POSIX)
  EXPECT_EQ("base_unittests", module->GetDebugBasename().value());
#elif BUILDFLAG(IS_WIN)
  EXPECT_EQ(L"base_unittests.exe.pdb", module->GetDebugBasename().value());
#endif
}

// Checks that ModuleCache returns the same module instance for addresses
// within the module.
MAYBE_TEST(ModuleCacheTest, LookupCodeAddresses) {
  uintptr_t ptr1 = reinterpret_cast<uintptr_t>(&AFunctionForTest);
  uintptr_t ptr2 = ptr1 + 1;
  ModuleCache cache;
  const ModuleCache::Module* module1 = cache.GetModuleForAddress(ptr1);
  const ModuleCache::Module* module2 = cache.GetModuleForAddress(ptr2);
  EXPECT_EQ(module1, module2);
  EXPECT_NE(nullptr, module1);
  EXPECT_GT(module1->GetSize(), 0u);
  EXPECT_LE(module1->GetBaseAddress(), ptr1);
  EXPECT_GT(module1->GetBaseAddress() + module1->GetSize(), ptr2);
}

MAYBE_TEST(ModuleCacheTest, LookupRange) {
  ModuleCache cache;
  auto to_inject = std::make_unique<IsolatedModule>();
  const ModuleCache::Module* module = to_inject.get();
  cache.AddCustomNativeModule(std::move(to_inject));

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() - 1));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress()));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress() +
                                              module->GetSize() - 1));
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() +
                                               module->GetSize()));
}

MAYBE_TEST(ModuleCacheTest, LookupNonNativeModule) {
  ModuleCache cache;
  const ModuleCache::Module* module =
      AddNonNativeModule(&cache, std::make_unique<IsolatedModule>(false));

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() - 1));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress()));
  EXPECT_EQ(module, cache.GetModuleForAddress(module->GetBaseAddress() +
                                              module->GetSize() - 1));
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(module->GetBaseAddress() +
                                               module->GetSize()));
}

MAYBE_TEST(ModuleCacheTest, LookupOverlaidNonNativeModule) {
  ModuleCache cache;
  auto native_module_to_inject = std::make_unique<IsolatedModule>();
  const ModuleCache::Module* native_module = native_module_to_inject.get();
  cache.AddCustomNativeModule(std::move(native_module_to_inject));

  // Overlay the native module with the non-native module, starting 8 bytes
  // into the native module and ending 8 bytes before the end of the module.
  const ModuleCache::Module* non_native_module = AddNonNativeModule(
      &cache,
      std::make_unique<FakeModule>(native_module->GetBaseAddress() + 8,
                                   native_module->GetSize() - 16, false));

  EXPECT_EQ(native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() - 1));
  EXPECT_EQ(non_native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress()));
  EXPECT_EQ(non_native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() +
                                      non_native_module->GetSize() - 1));
  EXPECT_EQ(native_module,
            cache.GetModuleForAddress(non_native_module->GetBaseAddress() +
                                      non_native_module->GetSize()));
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesAdd) {
  ModuleCache cache;
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module = modules.back().get();
  cache.UpdateNonNativeModules({}, std::move(modules));

  EXPECT_EQ(module, cache.GetModuleForAddress(1));
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesRemove) {
  ModuleCache cache;
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
  modules.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module = modules.back().get();
  cache.UpdateNonNativeModules({}, std::move(modules));
  cache.UpdateNonNativeModules({module}, {});

  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesRemoveModuleIsNotDestroyed) {
  bool was_destroyed = false;
  {
    ModuleCache cache;
    std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
    modules.push_back(std::make_unique<FakeModule>(
        1, 1, false,
        BindLambdaForTesting([&was_destroyed]() { was_destroyed = true; })));
    const ModuleCache::Module* module = modules.back().get();
    cache.UpdateNonNativeModules({}, std::move(modules));
    cache.UpdateNonNativeModules({module}, {});

    EXPECT_FALSE(was_destroyed);
  }
  EXPECT_TRUE(was_destroyed);
}
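
// As the test above and the partitioning test below check, modules passed for
// removal are retained (kept alive, though no longer returned by lookups)
// until the ModuleCache itself is destroyed.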

// Regression test to validate that when modules are partitioned into modules
// to keep and modules to remove, the modules to remove are not destroyed.
// https://crbug.com/1127466 case 2.
MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesPartitioning) {
  int destroyed_count = 0;
  const auto record_destroyed = [&destroyed_count]() { ++destroyed_count; };
  {
    ModuleCache cache;
    std::vector<std::unique_ptr<const ModuleCache::Module>> modules;
    modules.push_back(std::make_unique<FakeModule>(
        1, 1, false, BindLambdaForTesting(record_destroyed)));
    const ModuleCache::Module* module1 = modules.back().get();
    modules.push_back(std::make_unique<FakeModule>(
        2, 1, false, BindLambdaForTesting(record_destroyed)));
    cache.UpdateNonNativeModules({}, std::move(modules));
    cache.UpdateNonNativeModules({module1}, {});

    EXPECT_EQ(0, destroyed_count);
  }
  EXPECT_EQ(2, destroyed_count);
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesReplace) {
  ModuleCache cache;
  // Replace a module with another larger module at the same base address.
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules1;
  modules1.push_back(std::make_unique<FakeModule>(1, 1, false));
  const ModuleCache::Module* module1 = modules1.back().get();
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules2;
  modules2.push_back(std::make_unique<FakeModule>(1, 2, false));
  const ModuleCache::Module* module2 = modules2.back().get();

  cache.UpdateNonNativeModules({}, std::move(modules1));
  cache.UpdateNonNativeModules({module1}, std::move(modules2));

  EXPECT_EQ(module2, cache.GetModuleForAddress(2));
}

MAYBE_TEST(ModuleCacheTest,
           UpdateNonNativeModulesMultipleRemovedModulesAtSameAddress) {
  int destroyed_count = 0;
  const auto record_destroyed = [&destroyed_count]() { ++destroyed_count; };
  ModuleCache cache;
  // Checks that non-native modules can be repeatedly added and removed at the
  // same addresses, and that all are retained in the cache.
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules1;
  modules1.push_back(std::make_unique<FakeModule>(
      1, 1, false, BindLambdaForTesting(record_destroyed)));
  const ModuleCache::Module* module1 = modules1.back().get();
  std::vector<std::unique_ptr<const ModuleCache::Module>> modules2;
  modules2.push_back(std::make_unique<FakeModule>(
      1, 1, false, BindLambdaForTesting(record_destroyed)));
  const ModuleCache::Module* module2 = modules2.back().get();

  cache.UpdateNonNativeModules({}, std::move(modules1));
  cache.UpdateNonNativeModules({module1}, std::move(modules2));
  cache.UpdateNonNativeModules({module2}, {});

  EXPECT_EQ(0, destroyed_count);
}

MAYBE_TEST(ModuleCacheTest, UpdateNonNativeModulesCorrectModulesRemoved) {
  ModuleCache cache;
  std::vector<std::unique_ptr<const ModuleCache::Module>> to_add;
  for (int i = 0; i < 5; ++i) {
    to_add.push_back(std::make_unique<FakeModule>(i + 1, 1, false));
  }
  std::vector<const ModuleCache::Module*> to_remove = {to_add[1].get(),
                                                       to_add[3].get()};

  // Checks that the correct modules are removed when removing some but not all
  // modules.
  cache.UpdateNonNativeModules({}, std::move(to_add));
  cache.UpdateNonNativeModules({to_remove}, {});

  DCHECK_NE(nullptr, cache.GetModuleForAddress(1));
  DCHECK_EQ(nullptr, cache.GetModuleForAddress(2));
  DCHECK_NE(nullptr, cache.GetModuleForAddress(3));
  DCHECK_EQ(nullptr, cache.GetModuleForAddress(4));
  DCHECK_NE(nullptr, cache.GetModuleForAddress(5));
}

MAYBE_TEST(ModuleCacheTest, ModulesList) {
  ModuleCache cache;
  uintptr_t ptr = reinterpret_cast<uintptr_t>(&AFunctionForTest);
  const ModuleCache::Module* native_module = cache.GetModuleForAddress(ptr);
  const ModuleCache::Module* non_native_module =
      AddNonNativeModule(&cache, std::make_unique<FakeModule>(1, 2, false));
  EXPECT_NE(nullptr, native_module);

  std::vector<const ModuleCache::Module*> modules = cache.GetModules();
  ASSERT_EQ(2u, modules.size());
  EXPECT_EQ(native_module, modules[0]);
  EXPECT_EQ(non_native_module, modules[1]);
}

MAYBE_TEST(ModuleCacheTest, InvalidModule) {
  ModuleCache cache;
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

// arm64 module support is not implemented.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS) || \
    (BUILDFLAG(IS_ANDROID) && !defined(ARCH_CPU_ARM64))
// Validates that, for the memory regions listed in /proc/self/maps, the
// modules found via ModuleCache are consistent with those regions' extents.
TEST(ModuleCacheTest, CheckAgainstProcMaps) {
  std::string proc_maps;
  debug::ReadProcMaps(&proc_maps);
  std::vector<debug::MappedMemoryRegion> regions;
  ASSERT_TRUE(debug::ParseProcMaps(proc_maps, &regions));

  // Map distinct paths to lists of regions for the path in increasing memory
  // order.
  using RegionVector = std::vector<const debug::MappedMemoryRegion*>;
  using PathRegionsMap = std::map<StringPiece, RegionVector>;
  PathRegionsMap path_regions;
  for (const debug::MappedMemoryRegion& region : regions)
    path_regions[region.path].push_back(&region);

  const auto find_last_executable_region = [](const RegionVector& regions) {
    const auto rloc = std::find_if(
        regions.rbegin(), regions.rend(),
        [](const debug::MappedMemoryRegion* region) {
          return static_cast<bool>(region->permissions &
                                   debug::MappedMemoryRegion::EXECUTE);
        });
    return rloc == regions.rend() ? nullptr : *rloc;
  };

  int module_count = 0;
  // Loop through each distinct path.
  for (const auto& path_regions_pair : path_regions) {
    // Regions that aren't associated with absolute paths are unlikely to be
    // part of modules.
    if (path_regions_pair.first.empty() || path_regions_pair.first[0] != '/')
      continue;

    const debug::MappedMemoryRegion* const last_executable_region =
        find_last_executable_region(path_regions_pair.second);
    // The region isn't part of a module if no executable regions are
    // associated with the same path.
    if (!last_executable_region)
      continue;

    // Loop through all the regions associated with the path, checking that
    // modules created for addresses in each region have the expected extents.
    const uintptr_t expected_base_address =
        path_regions_pair.second.front()->start;
    for (const auto* region : path_regions_pair.second) {
      ModuleCache cache;
      const ModuleCache::Module* module =
          cache.GetModuleForAddress(region->start);
      // Not all regions matching the prior conditions are necessarily modules;
      // things like resources are also mmapped into memory from files. Ignore
      // any region that isn't part of a module.
      if (!module)
        continue;

      ++module_count;
      EXPECT_EQ(expected_base_address, module->GetBaseAddress());
      // This needs an inequality comparison because the module size is
      // computed based on the ELF section's actual extent, while the
      // |proc_maps| region is aligned to a larger boundary.
      EXPECT_LE(module->GetSize(),
                last_executable_region->end - expected_base_address)
          << "base address: " << std::hex << module->GetBaseAddress()
          << std::endl
          << "region start: " << std::hex << region->start << std::endl
          << "region end: " << std::hex << region->end << std::endl;
    }
  }

  // Linux should have at least this module and ld-linux.so. Android should
  // have at least this module and system libraries.
  EXPECT_GE(module_count, 2);
}
#endif
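
// For reference, each line parsed from /proc/self/maps above has the form
// <start>-<end> <perms> <offset> <dev> <inode> <path>, e.g. (illustrative
// values):
//   7f5e7a3b6000-7f5e7a3d8000 r-xp 00000000 08:01 1048602 /usr/lib/ld-2.31.so
// ParseProcMaps() turns such lines into debug::MappedMemoryRegion entries
// whose start, end, permissions, and path fields the test above inspects.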

// Module provider that always returns a fake module of size |module_size_|
// (1 by default) for any given |address|.
class MockModuleProvider : public ModuleCache::AuxiliaryModuleProvider {
 public:
  explicit MockModuleProvider(size_t module_size = 1)
      : module_size_(module_size) {}

  std::unique_ptr<const ModuleCache::Module> TryCreateModuleForAddress(
      uintptr_t address) override {
    return std::make_unique<FakeModule>(address, module_size_);
  }

 private:
  size_t module_size_;
};
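
// As the tests below exercise, the auxiliary provider is consulted only for
// addresses the cache cannot already attribute to a native module, and modules
// it creates remain cached even after the provider is unregistered.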

// Check that the auxiliary provider can inject new modules when registered.
TEST(ModuleCacheTest, RegisterAuxiliaryModuleProvider) {
  ModuleCache cache;
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));

  MockModuleProvider auxiliary_provider;
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);
  auto* module = cache.GetModuleForAddress(1);
  EXPECT_NE(nullptr, module);
  EXPECT_EQ(1U, module->GetBaseAddress());
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);

  // Even when unregistered, the module remains in the cache.
  EXPECT_EQ(module, cache.GetModuleForAddress(1));
}

// Check that ModuleCache's own module creator is used preferentially over the
// auxiliary provider if possible.
MAYBE_TEST(ModuleCacheTest, NativeModuleOverAuxiliaryModuleProvider) {
  ModuleCache cache;

  MockModuleProvider auxiliary_provider(/*module_size=*/100);
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);

  const ModuleCache::Module* module =
      cache.GetModuleForAddress(reinterpret_cast<uintptr_t>(&AFunctionForTest));
  ASSERT_NE(nullptr, module);

  // The module should be a native module, which will have a size greater than
  // 100 bytes.
  EXPECT_NE(100u, module->GetSize());
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);
}

// Check that the auxiliary provider is no longer used after being
// unregistered.
TEST(ModuleCacheTest, UnregisterAuxiliaryModuleProvider) {
  ModuleCache cache;
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));

  MockModuleProvider auxiliary_provider;
  cache.RegisterAuxiliaryModuleProvider(&auxiliary_provider);
  cache.UnregisterAuxiliaryModuleProvider(&auxiliary_provider);
  EXPECT_EQ(nullptr, cache.GetModuleForAddress(1));
}

#if BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
TEST(ModuleCacheTest, TransformELFModuleIDToBreakpadFormat) {
  // See explanation for the module_id mangling in the
  // base::TransformModuleIDToBreakpadFormat implementation.
  EXPECT_EQ(TransformModuleIDToBreakpadFormat(
                "7F0715C286F8B16C10E4AD349CDA3B9B56C7A773"),
            "C215077FF8866CB110E4AD349CDA3B9B0");
}
#endif
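
// For reference, the expected value above follows from interpreting the first
// 16 bytes of the ELF build ID as a GUID whose leading 4-byte, 2-byte, and
// 2-byte fields are stored little-endian: those fields are byte-swapped, the
// next 8 bytes are copied verbatim, any remaining bytes are dropped, and a
// "0" age suffix is appended. Below is a minimal illustrative sketch of that
// reordering (a hypothetical helper written for this comment, not the
// production implementation; it assumes an input of at least 32 hex digits).
[[maybe_unused]] std::string SketchTransformModuleIDForComparison(
    const std::string& module_id) {
  // Reverses the byte order of a hex string, e.g. "AABBCCDD" -> "DDCCBBAA".
  const auto swap_bytes = [](const std::string& hex) {
    std::string reversed;
    for (size_t i = hex.size(); i >= 2; i -= 2)
      reversed.append(hex, i - 2, 2);
    return reversed;
  };
  return swap_bytes(module_id.substr(0, 8)) +   // First GUID field (4 bytes).
         swap_bytes(module_id.substr(8, 4)) +   // Second GUID field (2 bytes).
         swap_bytes(module_id.substr(12, 4)) +  // Third GUID field (2 bytes).
         module_id.substr(16, 16) +             // Remaining 8 bytes, verbatim.
         "0";                                   // Age suffix.
}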

}  // namespace
}  // namespace base