// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/memory/raw_ptr.h"

#include <climits>
#include <cstdint>
#include <set>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>

#include "base/allocator/buildflags.h"
#include "base/allocator/partition_alloc_features.h"
#include "base/allocator/partition_alloc_support.h"
#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
#include "base/allocator/partition_allocator/partition_alloc.h"
#include "base/allocator/partition_allocator/partition_alloc_config.h"
#include "base/allocator/partition_allocator/partition_alloc_constants.h"
#include "base/allocator/partition_allocator/tagging.h"
#include "base/cpu.h"
#include "base/logging.h"
#include "base/memory/raw_ptr_asan_service.h"
#include "base/test/scoped_feature_list.h"
#include "build/build_config.h"
#include "build/buildflag.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/abseil-cpp/absl/types/optional.h"

#if BUILDFLAG(ENABLE_BASE_TRACING)
#include "third_party/perfetto/include/perfetto/test/traced_value_test_support.h"  // no-presubmit-check nogncheck
#endif  // BUILDFLAG(ENABLE_BASE_TRACING)

#if defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)
#include "base/allocator/partition_allocator/partition_tag_types.h"
#endif  // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

#if BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
#include <sanitizer/asan_interface.h>
#endif

using testing::AllOf;
using testing::HasSubstr;
using testing::Test;

static_assert(sizeof(raw_ptr<void>) == sizeof(void*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<int>) == sizeof(int*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<std::string>) == sizeof(std::string*),
              "raw_ptr shouldn't add memory overhead");

#if !BUILDFLAG(USE_BACKUP_REF_PTR)
// |is_trivially_copyable| assertion means that arrays/vectors of raw_ptr can
// be copied by memcpy.
static_assert(std::is_trivially_copyable<raw_ptr<void>>::value,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable<raw_ptr<int>>::value,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable<raw_ptr<std::string>>::value,
              "raw_ptr should be trivially copyable");
// |is_trivially_default_constructible| assertion helps retain implicit
// default constructors when raw_ptr is used as a union field.  Example of an
// error if this assertion didn't hold:
//
//     ../../base/trace_event/trace_arguments.h:249:16: error: call to
//     implicitly-deleted default constructor of 'base::trace_event::TraceValue'
//         TraceValue ret;
//                    ^
//     ../../base/trace_event/trace_arguments.h:211:26: note: default
//     constructor of 'TraceValue' is implicitly deleted because variant field
//     'as_pointer' has a non-trivial default constructor
//       raw_ptr<const void> as_pointer;
static_assert(std::is_trivially_default_constructible<raw_ptr<void>>::value,
              "raw_ptr should be trivially default constructible");
static_assert(std::is_trivially_default_constructible<raw_ptr<int>>::value,
              "raw_ptr should be trivially default constructible");
static_assert(
    std::is_trivially_default_constructible<raw_ptr<std::string>>::value,
    "raw_ptr should be trivially default constructible");
#endif  // !BUILDFLAG(USE_BACKUP_REF_PTR)

// Don't use base::internal for testing raw_ptr API, to test if code outside
// this namespace calls the correct functions from this namespace.
namespace {

using RawPtrCountingImpl =
    base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrImpl>;
using RawPtrCountingMayDangleImpl =
    base::internal::RawPtrCountingImplWrapperForTest<base::RawPtrMayDangle>;

template <typename T>
using CountingRawPtr = raw_ptr<T, RawPtrCountingImpl>;
template <typename T>
using CountingRawPtrMayDangle = raw_ptr<T, RawPtrCountingMayDangleImpl>;

struct MyStruct {
  int x;
};

struct Base1 {
  explicit Base1(int b1) : b1(b1) {}
  int b1;
};

struct Base2 {
  explicit Base2(int b2) : b2(b2) {}
  int b2;
};

struct Derived : Base1, Base2 {
  Derived(int b1, int b2, int d) : Base1(b1), Base2(b2), d(d) {}
  int d;
};

class RawPtrTest : public Test {
 protected:
  void SetUp() override {
    RawPtrCountingImpl::ClearCounters();
    RawPtrCountingMayDangleImpl::ClearCounters();
  }
};

// Struct intended to be used with designated initializers and passed
// to the `CountingRawPtrHasCounts()` matcher.
struct CountingRawPtrExpectations {
  absl::optional<int> wrap_raw_ptr_cnt;
  absl::optional<int> release_wrapped_ptr_cnt;
  absl::optional<int> get_for_dereference_cnt;
  absl::optional<int> get_for_extraction_cnt;
  absl::optional<int> get_for_comparison_cnt;
  absl::optional<int> wrapped_ptr_swap_cnt;
  absl::optional<int> wrapped_ptr_less_cnt;
  absl::optional<int> pointer_to_member_operator_cnt;
};

#define REPORT_UNEQUAL_RAW_PTR_COUNTER(member_name, CounterClassImpl)   \
  {                                                                     \
    if (arg.member_name.has_value() &&                                  \
        arg.member_name.value() != CounterClassImpl::member_name) {     \
      *result_listener << "Expected `" #member_name "` to be "          \
                       << arg.member_name.value() << " but got "        \
                       << CounterClassImpl::member_name << "; ";        \
      result = false;                                                   \
    }                                                                   \
  }

#define REPORT_UNEQUAL_RAW_PTR_COUNTERS(result, CounterClassImpl)             \
  {                                                                           \
    result = true;                                                            \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(wrap_raw_ptr_cnt, CounterClassImpl)        \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(release_wrapped_ptr_cnt, CounterClassImpl) \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(get_for_dereference_cnt, CounterClassImpl) \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(get_for_extraction_cnt, CounterClassImpl)  \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(get_for_comparison_cnt, CounterClassImpl)  \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(wrapped_ptr_swap_cnt, CounterClassImpl)    \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(wrapped_ptr_less_cnt, CounterClassImpl)    \
    REPORT_UNEQUAL_RAW_PTR_COUNTER(pointer_to_member_operator_cnt,            \
                                   CounterClassImpl)                          \
  }

// Matcher used with `CountingRawPtr`. Provides slightly shorter
// boilerplate for verifying counts.
// Implicit `arg` has type `CountingRawPtrExpectations`.
MATCHER(CountingRawPtrHasCounts, "`CountingRawPtr` has specified counters") {
  bool result = true;
  REPORT_UNEQUAL_RAW_PTR_COUNTERS(result, RawPtrCountingImpl);
  return result;
}

// Implicit `arg` has type `CountingRawPtrExpectations`.
MATCHER(MayDangleCountingRawPtrHasCounts,
        "`MayDangleCountingRawPtr` has specified counters") {
  bool result = true;
  REPORT_UNEQUAL_RAW_PTR_COUNTERS(result, RawPtrCountingMayDangleImpl);
  return result;
}

#undef REPORT_UNEQUAL_RAW_PTR_COUNTERS
#undef REPORT_UNEQUAL_RAW_PTR_COUNTER
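// Typical usage of the matcher (illustrative; the real checks follow in the
// tests below). Only the fields that are explicitly set are verified:
//
//   EXPECT_THAT((CountingRawPtrExpectations{
//                   .get_for_dereference_cnt = 0,
//                   .get_for_extraction_cnt = 1,
//               }),
//               CountingRawPtrHasCounts());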
// Use this instead of std::ignore, to prevent the instruction from getting
// optimized out by the compiler.
volatile int g_volatile_int_to_ignore;

TEST_F(RawPtrTest, NullStarDereference) {
  raw_ptr<int> ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *ptr, "");
}

TEST_F(RawPtrTest, NullArrowDereference) {
  raw_ptr<MyStruct> ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = ptr->x, "");
}

TEST_F(RawPtrTest, NullExtractNoDereference) {
  CountingRawPtr<int> ptr = nullptr;
  // No dereference hence shouldn't crash.
  int* raw = ptr;
  std::ignore = raw;
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0}),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, NullCmpExplicit) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_TRUE(ptr == nullptr);
  EXPECT_TRUE(nullptr == ptr);
  EXPECT_FALSE(ptr != nullptr);
  EXPECT_FALSE(nullptr != ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, NullCmpBool) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_FALSE(ptr);
  EXPECT_TRUE(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

void FuncThatAcceptsBool(bool b) {}

bool IsValidNoCast(CountingRawPtr<int> ptr) {
  return !!ptr;  // !! to avoid implicit cast
}
bool IsValidNoCast2(CountingRawPtr<int> ptr) {
  return ptr && true;
}

TEST_F(RawPtrTest, BoolOpNotCast) {
  CountingRawPtr<int> ptr = nullptr;
  volatile bool is_valid = !!ptr;  // !! to avoid implicit cast
  is_valid = ptr || is_valid;      // volatile, so won't be optimized
  if (ptr)
    is_valid = true;
  [[maybe_unused]] bool is_not_valid = !ptr;
  if (!ptr)
    is_not_valid = true;
  std::ignore = IsValidNoCast(ptr);
  std::ignore = IsValidNoCast2(ptr);
  FuncThatAcceptsBool(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

bool IsValidWithCast(CountingRawPtr<int> ptr) {
  return ptr;
}

// This test is mostly for documentation purposes. It demonstrates cases where
// |operator T*| is called first and then the pointer is converted to bool,
// as opposed to calling |operator bool| directly. The former may be more
// costly, so the caller has to be careful not to trigger this path.
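// For example (illustrative):
//   if (ptr) { ... }  // |operator bool|; no extraction is counted.
//   bool b = ptr;     // |operator T*| first, then pointer-to-bool.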
TEST_F(RawPtrTest, CastNotBoolOp) {
  CountingRawPtr<int> ptr = nullptr;
  [[maybe_unused]] bool is_valid = ptr;
  is_valid = IsValidWithCast(ptr);
  FuncThatAcceptsBool(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 3,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, StarDereference) {
  int foo = 42;
  CountingRawPtr<int> ptr = &foo;
  EXPECT_EQ(*ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, ArrowDereference) {
  MyStruct foo = {42};
  CountingRawPtr<MyStruct> ptr = &foo;
  EXPECT_EQ(ptr->x, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, Delete) {
  CountingRawPtr<int> ptr = new int(42);
  delete ptr;
  // The pointer was extracted using an implicit cast before passing to
  // |delete|.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, ClearAndDelete) {
  CountingRawPtr<int> ptr(new int);
  ptr.ClearAndDelete();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations{
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountingRawPtrHasCounts());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}

TEST_F(RawPtrTest, ClearAndDeleteArray) {
  CountingRawPtr<int> ptr(new int[8]);
  ptr.ClearAndDeleteArray();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations{
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountingRawPtrHasCounts());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}

TEST_F(RawPtrTest, ExtractAsDangling) {
  CountingRawPtr<int> ptr(new int);

  if constexpr (std::is_same_v<RawPtrCountingImpl,
                               RawPtrCountingMayDangleImpl>) {
    auto expectations = CountingRawPtrExpectations{
        .wrap_raw_ptr_cnt = 1,
        .release_wrapped_ptr_cnt = 0,
        .get_for_dereference_cnt = 0,
        .wrapped_ptr_swap_cnt = 0,
    };
    EXPECT_THAT((expectations), CountingRawPtrHasCounts());
    EXPECT_THAT((expectations), MayDangleCountingRawPtrHasCounts());
  } else {
    EXPECT_THAT((CountingRawPtrExpectations{
                    .wrap_raw_ptr_cnt = 1,
                    .release_wrapped_ptr_cnt = 0,
                    .get_for_dereference_cnt = 0,
                    .wrapped_ptr_swap_cnt = 0,
                }),
                CountingRawPtrHasCounts());
    EXPECT_THAT((CountingRawPtrExpectations{
                    .wrap_raw_ptr_cnt = 0,
                    .release_wrapped_ptr_cnt = 0,
                    .get_for_dereference_cnt = 0,
                    .wrapped_ptr_swap_cnt = 0,
                }),
                MayDangleCountingRawPtrHasCounts());
  }

  EXPECT_TRUE(ptr.get());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  if constexpr (std::is_same_v<RawPtrCountingImpl,
                               RawPtrCountingMayDangleImpl>) {
    auto expectations = CountingRawPtrExpectations{
        .wrap_raw_ptr_cnt = 1,
        .release_wrapped_ptr_cnt = 1,
        .get_for_dereference_cnt = 0,
        .wrapped_ptr_swap_cnt = 0,
    };
    EXPECT_THAT((expectations), CountingRawPtrHasCounts());
    EXPECT_THAT((expectations), MayDangleCountingRawPtrHasCounts());
  } else {
    EXPECT_THAT((CountingRawPtrExpectations{
                    .wrap_raw_ptr_cnt = 1,
                    .release_wrapped_ptr_cnt = 1,
                    .get_for_dereference_cnt = 0,
                    .wrapped_ptr_swap_cnt = 0,
                }),
                CountingRawPtrHasCounts());
    EXPECT_THAT((CountingRawPtrExpectations{
                    .wrap_raw_ptr_cnt = 1,
                    .release_wrapped_ptr_cnt = 0,
                    .get_for_dereference_cnt = 0,
                    .wrapped_ptr_swap_cnt = 0,
                }),
                MayDangleCountingRawPtrHasCounts());
  }

  EXPECT_FALSE(ptr.get());
  EXPECT_TRUE(dangling.get());

  dangling.ClearAndDelete();
}

TEST_F(RawPtrTest, ExtractAsDanglingFromDangling) {
  CountingRawPtrMayDangle<int> ptr(new int);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
              }),
              MayDangleCountingRawPtrHasCounts());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  // wrap_raw_ptr_cnt remains `1` because, as `ptr` is already a dangling
  // pointer, we are only moving `ptr` to `dangling` here to avoid extra cost.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
              }),
              MayDangleCountingRawPtrHasCounts());

  dangling.ClearAndDelete();
}

TEST_F(RawPtrTest, ConstVolatileVoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<const volatile void> ptr = foo;
  EXPECT_EQ(*static_cast<const volatile int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, VoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<void> ptr = foo;
  EXPECT_EQ(*static_cast<int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}
TEST_F(RawPtrTest, OperatorEQ) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_TRUE(ptr1 == ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_TRUE(ptr1 == ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_TRUE(&foo == ptr3);
  EXPECT_TRUE(ptr3 == &foo);
  EXPECT_FALSE(ptr1 == ptr3);

  ptr1 = &foo;
  EXPECT_TRUE(ptr1 == ptr3);
  EXPECT_TRUE(ptr3 == ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, OperatorNE) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_FALSE(ptr1 != ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_FALSE(ptr1 != ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_FALSE(&foo != ptr3);
  EXPECT_FALSE(ptr3 != &foo);
  EXPECT_TRUE(ptr1 != ptr3);

  ptr1 = &foo;
  EXPECT_FALSE(ptr1 != ptr3);
  EXPECT_FALSE(ptr3 != ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, OperatorEQCast) {
  int foo = 42;
  const int* raw_int_ptr = &foo;
  volatile void* raw_void_ptr = &foo;
  CountingRawPtr<volatile int> checked_int_ptr = &foo;
  CountingRawPtr<const void> checked_void_ptr = &foo;
  EXPECT_TRUE(checked_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by
  // the cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, OperatorEQCastHierarchy) {
  Derived derived_val(42, 84, 1024);
  Derived* raw_derived_ptr = &derived_val;
  const Base1* raw_base1_ptr = &derived_val;
  volatile Base2* raw_base2_ptr = &derived_val;
  // Double check the basic understanding of pointers: Even though the numeric
  // value (i.e. the address) isn't equal, the pointers are still equal. That's
  // because the conversion from derived to base adjusts the address.
  // raw_ptr must behave the same, which is checked below.
  ASSERT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  ASSERT_TRUE(raw_base2_ptr == raw_derived_ptr);

  CountingRawPtr<const volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<volatile Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const Base2> checked_base2_ptr = &derived_val;
  EXPECT_TRUE(checked_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base1_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_base1_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base1_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base1_ptr == checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets
  // when casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_TRUE(checked_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base2_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_base2_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base2_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base2_ptr == checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by
  // the cast operator (slower).
  // The 4 extractions come from .get() checks, which compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, OperatorNECast) {
  int foo = 42;
  volatile int* raw_int_ptr = &foo;
  const void* raw_void_ptr = &foo;
  CountingRawPtr<const int> checked_int_ptr = &foo;
  CountingRawPtr<volatile void> checked_void_ptr = &foo;
  EXPECT_FALSE(checked_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by
  // the cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, OperatorNECastHierarchy) {
  Derived derived_val(42, 84, 1024);
  const Derived* raw_derived_ptr = &derived_val;
  volatile Base1* raw_base1_ptr = &derived_val;
  const Base2* raw_base2_ptr = &derived_val;
  CountingRawPtr<volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<const Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const volatile Base2> checked_base2_ptr = &derived_val;
  EXPECT_FALSE(checked_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base1_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_base1_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base1_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base1_ptr != checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets
  // when casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_FALSE(checked_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base2_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_base2_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base2_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base2_ptr != checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by
  // the cast operator (slower).
  // The 4 extractions come from .get() checks, which compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountingRawPtrHasCounts());
}
TEST_F(RawPtrTest, Cast) {
  Derived derived_val(42, 84, 1024);
  raw_ptr<Derived> checked_derived_ptr = &derived_val;
  Base1* raw_base1_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base1_ptr->b1, 42);
  Base2* raw_base2_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base2_ptr->b2, 84);

  Derived* raw_derived_ptr = static_cast<Derived*>(raw_base1_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);
  raw_derived_ptr = static_cast<Derived*>(raw_base2_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);

  raw_ptr<Base1> checked_base1_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base1_ptr->b1, 42);
  raw_ptr<Base2> checked_base2_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base2_ptr->b2, 84);

  raw_ptr<Derived> checked_derived_ptr2 =
      static_cast<Derived*>(checked_base1_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);
  checked_derived_ptr2 = static_cast<Derived*>(checked_base2_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);

  const Derived* raw_const_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_const_derived_ptr->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr = raw_const_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr->d, 1024);

  const Derived* raw_const_derived_ptr2 = checked_const_derived_ptr;
  EXPECT_EQ(raw_const_derived_ptr2->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr2->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr2 = raw_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr3 = checked_derived_ptr2;
  EXPECT_EQ(checked_const_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr3->d, 1024);

  volatile Derived* raw_volatile_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(raw_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(raw_volatile_derived_ptr->d, 1024);

  raw_ptr<volatile Derived> checked_volatile_derived_ptr =
      raw_volatile_derived_ptr;
  EXPECT_EQ(checked_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(checked_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(checked_volatile_derived_ptr->d, 1024);

  void* raw_void_ptr = checked_derived_ptr;
  raw_ptr<void> checked_void_ptr = raw_derived_ptr;
  raw_ptr<Derived> checked_derived_ptr3 = static_cast<Derived*>(raw_void_ptr);
  raw_ptr<Derived> checked_derived_ptr4 =
      static_cast<Derived*>(checked_void_ptr);
  EXPECT_EQ(checked_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_derived_ptr3->d, 1024);
  EXPECT_EQ(checked_derived_ptr4->b1, 42);
  EXPECT_EQ(checked_derived_ptr4->b2, 84);
  EXPECT_EQ(checked_derived_ptr4->d, 1024);
}

TEST_F(RawPtrTest, UpcastConvertible) {
  {
    Derived derived_val(42, 84, 1024);
    raw_ptr<Derived> checked_derived_ptr = &derived_val;

    raw_ptr<Base1> checked_base1_ptr(checked_derived_ptr);
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    raw_ptr<Base2> checked_base2_ptr(checked_derived_ptr);
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    checked_base1_ptr = checked_derived_ptr;
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    checked_base2_ptr = checked_derived_ptr;
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    EXPECT_EQ(checked_base1_ptr, checked_derived_ptr);
    EXPECT_EQ(checked_base2_ptr, checked_derived_ptr);
  }
  {
    Derived derived_val(42, 84, 1024);
    raw_ptr<Derived> checked_derived_ptr1 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr2 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr3 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr4 = &derived_val;

    raw_ptr<Base1> checked_base1_ptr(std::move(checked_derived_ptr1));
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    raw_ptr<Base2> checked_base2_ptr(std::move(checked_derived_ptr2));
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    checked_base1_ptr = std::move(checked_derived_ptr3);
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    checked_base2_ptr = std::move(checked_derived_ptr4);
    EXPECT_EQ(checked_base2_ptr->b2, 84);
  }
}

TEST_F(RawPtrTest, UpcastNotConvertible) {
  class Base {};
  class Derived : private Base {};
  class Unrelated {};
  EXPECT_FALSE((std::is_convertible<raw_ptr<Derived>, raw_ptr<Base>>::value));
  EXPECT_FALSE((std::is_convertible<raw_ptr<Unrelated>, raw_ptr<Base>>::value));
  EXPECT_FALSE((std::is_convertible<raw_ptr<Unrelated>, raw_ptr<void>>::value));
  EXPECT_FALSE((std::is_convertible<raw_ptr<void>, raw_ptr<Unrelated>>::value));
  EXPECT_FALSE(
      (std::is_convertible<raw_ptr<int64_t>, raw_ptr<int32_t>>::value));
  EXPECT_FALSE(
      (std::is_convertible<raw_ptr<int16_t>, raw_ptr<int32_t>>::value));
}

TEST_F(RawPtrTest, UpcastPerformance) {
  {
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(checked_derived_ptr);
    CountingRawPtr<Base2> checked_base2_ptr(checked_derived_ptr);
    checked_base1_ptr = checked_derived_ptr;
    checked_base2_ptr = checked_derived_ptr;
  }
  {
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(std::move(checked_derived_ptr));
    CountingRawPtr<Base2> checked_base2_ptr(std::move(checked_derived_ptr));
    checked_base1_ptr = std::move(checked_derived_ptr);
    checked_base2_ptr = std::move(checked_derived_ptr);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}
TEST_F(RawPtrTest, CustomSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  // Recommended use pattern.
  using std::swap;
  swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 1);
}

TEST_F(RawPtrTest, StdSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  std::swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
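  // A qualified std::swap() call bypasses the ADL lookup that would find the
  // raw_ptr-specific swap() overload, so it falls back to the generic
  // move-based swap; the custom swap counter is therefore expected to stay 0.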
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 0);
}

TEST_F(RawPtrTest, PostIncrementOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*ptr++, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, PostDecrementOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[3];
  for (int i = 3; i >= 0; --i) {
    ASSERT_EQ(*ptr--, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, PreIncrementOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; ++i, ++ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, PreDecrementOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[3];
  for (int i = 3; i >= 0; --i, --ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, PlusEqualOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; i += 2, ptr += 2) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, PlusEqualOperatorTypes) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  ASSERT_EQ(*ptr, 42);
  ptr += 2;  // Positive literal.
  ASSERT_EQ(*ptr, 44);
  ptr -= 2;  // Negative literal.
  ASSERT_EQ(*ptr, 42);
  ptr += ptrdiff_t{1};  // ptrdiff_t.
  ASSERT_EQ(*ptr, 43);
  ptr += size_t{2};  // size_t.
  ASSERT_EQ(*ptr, 45);
}

TEST_F(RawPtrTest, MinusEqualOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[3];
  for (int i = 3; i >= 0; i -= 2, ptr -= 2) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, MinusEqualOperatorTypes) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[3];
  ASSERT_EQ(*ptr, 45);
  ptr -= 2;  // Positive literal.
  ASSERT_EQ(*ptr, 43);
  ptr -= -2;  // Negative literal.
  ASSERT_EQ(*ptr, 45);
  ptr -= ptrdiff_t{2};  // ptrdiff_t.
  ASSERT_EQ(*ptr, 43);
  ptr -= size_t{1};  // size_t.
  ASSERT_EQ(*ptr, 42);
}

TEST_F(RawPtrTest, AdvanceString) {
  const char kChars[] = "Hello";
  std::string str = kChars;
  CountingRawPtr<const char> ptr = str.c_str();
  for (size_t i = 0; i < str.size(); ++i, ++ptr) {
    ASSERT_EQ(*ptr, kChars[i]);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 5,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, AssignmentFromNullptr) {
  CountingRawPtr<int> wrapped_ptr;
  wrapped_ptr = nullptr;
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}

void FunctionWithRawPtrParameter(raw_ptr<int> actual_ptr, int* expected_ptr) {
  EXPECT_EQ(actual_ptr.get(), expected_ptr);
  EXPECT_EQ(*actual_ptr, *expected_ptr);
}

// This test checks that raw_ptr<T> can be passed by value into function
// parameters. This is mostly a smoke test for the TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_ImplicitlyMovedTemporary) {
  int x = 123;
  FunctionWithRawPtrParameter(
      raw_ptr<int>(&x),  // Temporary that will be moved into the function.
      &x);
}

// This test checks that raw_ptr<T> can be passed by value into function
// parameters. This is mostly a smoke test for the TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_ExplicitlyMovedLValue) {
  int x = 123;
  raw_ptr<int> ptr(&x);
  FunctionWithRawPtrParameter(std::move(ptr), &x);
}

// This test checks that raw_ptr<T> can be passed by value into function
// parameters. This is mostly a smoke test for the TRIVIAL_ABI attribute.
TEST_F(RawPtrTest, FunctionParameters_Copy) {
  int x = 123;
  raw_ptr<int> ptr(&x);
  FunctionWithRawPtrParameter(ptr,  // `ptr` will be copied into the function.
                              &x);
}
TEST_F(RawPtrTest, SetLookupUsesGetForComparison) {
  std::set<CountingRawPtr<int>> set;
  int x = 123;
  CountingRawPtr<int> ptr(&x);

  RawPtrCountingImpl::ClearCounters();
  set.emplace(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  // Nothing to compare to yet.
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountingRawPtrHasCounts());

  RawPtrCountingImpl::ClearCounters();
  set.emplace(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 items to compare to => 4 calls.
                  .get_for_comparison_cnt = 4,
                  // 1 element to compare to => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountingRawPtrHasCounts());

  RawPtrCountingImpl::ClearCounters();
  set.count(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 2 gets. Fewer than before, because this
                  // time a raw pointer is one side of each comparison.
                  .get_for_comparison_cnt = 2,
                  // 2 comparisons => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountingRawPtrHasCounts());

  RawPtrCountingImpl::ClearCounters();
  set.count(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 4 gets (both sides are wrapped).
                  .get_for_comparison_cnt = 4,
                  // 2 comparisons => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountingRawPtrHasCounts());
}

TEST_F(RawPtrTest, ComparisonOperatorUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < ptr);
  EXPECT_FALSE(ptr > ptr);
  EXPECT_TRUE(ptr <= ptr);
  EXPECT_TRUE(ptr >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 8,
                  // < is used directly, not std::less().
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountingRawPtrHasCounts());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < &x);
  EXPECT_FALSE(ptr > &x);
  EXPECT_TRUE(ptr <= &x);
  EXPECT_TRUE(ptr >= &x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountingRawPtrHasCounts());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(&x < ptr);
  EXPECT_FALSE(&x > ptr);
  EXPECT_TRUE(&x <= ptr);
  EXPECT_TRUE(&x >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountingRawPtrHasCounts());
}
// This test checks how the std library handles collections like
// std::vector<raw_ptr<T>>.
//
// As of the time this test was written, reallocating std::vector's storage
// (e.g. when growing the vector) requires calling raw_ptr's destructor on
// the old storage (after std::move-ing the data to the new storage).  In
// the future we hope that TRIVIAL_ABI (or [[trivially_relocatable]]
// proposed by P1144 [1]) will allow memcpy-ing the elements into the
// new storage (without invoking destructors and move constructors
// and/or move assignment operators).  At that point, the assert in the
// test should be modified to capture the new, better behavior.
//
// In the meantime, this test serves as a basic correctness test that
// ensures that raw_ptr<T> stored in a std::vector passes basic smoke
// tests.
//
// [1]
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p1144r5.html#wording-attribute
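//
// A rough sketch of the hoped-for behavior (hypothetical; this is not what
// today's standard library does): growing the vector would relocate the
// elements with a plain byte copy instead of per-element move + destroy:
//
//   memcpy(new_storage, old_storage, count * sizeof(raw_ptr<T>));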
  1043. TEST_F(RawPtrTest, TrivialRelocability) {
  1044. std::vector<CountingRawPtr<int>> vector;
  1045. int x = 123;
  1046. // See how many times raw_ptr's destructor is called when std::vector
  1047. // needs to increase its capacity and reallocate the internal vector
  1048. // storage (moving the raw_ptr elements).
  1049. RawPtrCountingImpl::ClearCounters();
  1050. size_t number_of_capacity_changes = 0;
  1051. do {
  1052. size_t previous_capacity = vector.capacity();
  1053. while (vector.capacity() == previous_capacity)
  1054. vector.emplace_back(&x);
  1055. number_of_capacity_changes++;
  1056. } while (number_of_capacity_changes < 10);
  1057. #if BUILDFLAG(USE_BACKUP_REF_PTR)
  1058. // TODO(lukasza): In the future (once C++ language and std library
  1059. // support custom trivially relocatable objects) this #if branch can
  1060. // be removed (keeping only the right long-term expectation from the
  1061. // #else branch).
  1062. EXPECT_NE(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
  1063. #else
  1064. // This is the right long-term expectation.
  1065. //
  1066. // (This EXPECT_EQ assertion is slightly misleading in
  1067. // !USE_BACKUP_REF_PTR mode, because RawPtrNoOpImpl has a default
  1068. // destructor that doesn't go through
  1069. // RawPtrCountingImpl::ReleaseWrappedPtr. Nevertheless, the spirit of
  1070. // the EXPECT_EQ is correct + the assertion should be true in the
  1071. // long-term.)
  1072. EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
  1073. #endif
  1074. // Basic smoke test that raw_ptr elements in a vector work okay.
  1075. for (const auto& elem : vector) {
  1076. EXPECT_EQ(elem.get(), &x);
  1077. EXPECT_EQ(*elem, x);
  1078. }
  1079. // Verification that release_wrapped_ptr_cnt does capture how many times the
  1080. // destructors are called (e.g. that it is not always zero).
  1081. RawPtrCountingImpl::ClearCounters();
  1082. size_t number_of_cleared_elements = vector.size();
  1083. vector.clear();
  1084. #if BUILDFLAG(USE_BACKUP_REF_PTR)
  1085. EXPECT_EQ((int)number_of_cleared_elements,
  1086. RawPtrCountingImpl::release_wrapped_ptr_cnt);
  1087. #else
  1088. // TODO(lukasza): !USE_BACKUP_REF_PTR / RawPtrNoOpImpl has a default
  1089. // destructor that doesn't go through
  1090. // RawPtrCountingImpl::ReleaseWrappedPtr. So we can't really depend
  1091. // on `g_release_wrapped_ptr_cnt`. This #else branch should be
  1092. // deleted once USE_BACKUP_REF_PTR is removed (e.g. once BackupRefPtr
  1093. // ships to the Stable channel).
  1094. EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
  1095. std::ignore = number_of_cleared_elements;
  1096. #endif
  1097. }
  1098. struct BaseStruct {
  1099. explicit BaseStruct(int a) : a(a) {}
  1100. virtual ~BaseStruct() = default;
  1101. int a;
  1102. };
  1103. struct DerivedType1 : public BaseStruct {
  1104. explicit DerivedType1(int a, int b) : BaseStruct(a), b(b) {}
  1105. int b;
  1106. };
  1107. struct DerivedType2 : public BaseStruct {
  1108. explicit DerivedType2(int a, int c) : BaseStruct(a), c(c) {}
  1109. int c;
  1110. };
  1111. TEST_F(RawPtrTest, DerivedStructsComparison) {
  1112. DerivedType1 derived_1(42, 84);
  1113. raw_ptr<DerivedType1> checked_derived1_ptr = &derived_1;
  1114. DerivedType2 derived_2(21, 10);
  1115. raw_ptr<DerivedType2> checked_derived2_ptr = &derived_2;
  1116. // Make sure that comparing a |DerivedType2*| to a |DerivedType1*| casted
  1117. // as a |BaseStruct*| doesn't cause CFI errors.
  1118. EXPECT_NE(checked_derived1_ptr,
  1119. static_cast<BaseStruct*>(checked_derived2_ptr.get()));
  1120. EXPECT_NE(static_cast<BaseStruct*>(checked_derived1_ptr.get()),
  1121. checked_derived2_ptr);
  1122. }
  1123. #if BUILDFLAG(ENABLE_BASE_TRACING)
  1124. TEST_F(RawPtrTest, TracedValueSupport) {
  1125. // Serialise nullptr.
  1126. EXPECT_EQ(perfetto::TracedValueToString(raw_ptr<int>()), "0x0");
  1127. {
  1128. // If the pointer is non-null, its dereferenced value will be serialised.
  1129. int value = 42;
  1130. EXPECT_EQ(perfetto::TracedValueToString(raw_ptr<int>(&value)), "42");
  1131. }
  1132. struct WithTraceSupport {
  1133. void WriteIntoTrace(perfetto::TracedValue ctx) const {
  1134. std::move(ctx).WriteString("result");
  1135. }
  1136. };
  1137. {
  1138. WithTraceSupport value;
  1139. EXPECT_EQ(perfetto::TracedValueToString(raw_ptr<WithTraceSupport>(&value)),
  1140. "result");
  1141. }
  1142. }
  1143. #endif // BUILDFLAG(ENABLE_BASE_TRACING)
class PmfTestBase {
 public:
  int MemFunc(char, double) const { return 11; }
};

class PmfTestDerived : public PmfTestBase {
 public:
  using PmfTestBase::MemFunc;
  int MemFunc(float, double) { return 22; }
};

}  // namespace

namespace base {
namespace internal {

#if BUILDFLAG(USE_BACKUP_REF_PTR) && !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)

void HandleOOM(size_t unused_size) {
  LOG(FATAL) << "Out of memory";
}

static constexpr partition_alloc::PartitionOptions kOpts = {
    partition_alloc::PartitionOptions::AlignedAlloc::kDisallowed,
    partition_alloc::PartitionOptions::ThreadCache::kDisabled,
    partition_alloc::PartitionOptions::Quarantine::kDisallowed,
    partition_alloc::PartitionOptions::Cookie::kAllowed,
    partition_alloc::PartitionOptions::BackupRefPtr::kEnabled,
    partition_alloc::PartitionOptions::BackupRefPtrZapping::kEnabled,
    partition_alloc::PartitionOptions::UseConfigurablePool::kNo,
};
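
// kOpts above creates partitions with BackupRefPtr (and slot zapping)
// enabled, so the BackupRefPtrImpl tests below exercise the real ref-count
// and quarantine machinery rather than the no-op implementation.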

TEST(BackupRefPtrImpl, Basic) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  base::CPU cpu;
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  int* raw_ptr1 =
      reinterpret_cast<int*>(allocator.root()->Alloc(sizeof(int), ""));
  // Use the actual raw_ptr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int> wrapped_ptr1 = raw_ptr1;

  *raw_ptr1 = 42;
  EXPECT_EQ(*raw_ptr1, *wrapped_ptr1);

  allocator.root()->Free(raw_ptr1);
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  // In debug builds, the use-after-free should be caught immediately.
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
#else  // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  if (cpu.has_mte()) {
    // If the hardware supports MTE, the use-after-free should also be caught.
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
  } else {
    // The allocation should be poisoned since there's a raw_ptr alive.
    EXPECT_NE(*wrapped_ptr1, 42);
  }

  // The allocator should not be able to reuse the slot at this point.
  void* raw_ptr2 = allocator.root()->Alloc(sizeof(int), "");
  EXPECT_NE(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr2));
  allocator.root()->Free(raw_ptr2);

  // When the last reference is released, the slot should become reusable.
  wrapped_ptr1 = nullptr;
  void* raw_ptr3 = allocator.root()->Alloc(sizeof(int), "");
  EXPECT_EQ(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr3));
  allocator.root()->Free(raw_ptr3);
#endif  // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
}
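
// Once PartitionAlloc Everywhere lands (per the TODO above), the setup in
// this test should reduce to roughly the following sketch; hypothetical
// until new/delete is guaranteed to route through a BRP-enabled partition:
//
//   int* raw_ptr1 = new int;
//   raw_ptr<int> wrapped_ptr1 = raw_ptr1;
//   *raw_ptr1 = 42;
//   EXPECT_EQ(*raw_ptr1, *wrapped_ptr1);
//   delete raw_ptr1;  // Slot is quarantined while |wrapped_ptr1| is alive.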

TEST(BackupRefPtrImpl, ZeroSized) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);

  std::vector<raw_ptr<void>> ptrs;
  // Use a reasonable number of elements to fill up the slot span.
  for (int i = 0; i < 128 * 1024; ++i) {
    // Constructing a raw_ptr instance from a zero-sized allocation should
    // not result in a crash.
    ptrs.emplace_back(allocator.root()->Alloc(0, ""));
  }
}

TEST(BackupRefPtrImpl, EndPointer) {
  // This test requires a fresh partition with an empty free list.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);

  // Check multiple size buckets and levels of slot filling.
  for (int size = 0; size < 1024; size += sizeof(void*)) {
    // Creating a raw_ptr from an address right past the end of an allocation
    // should not result in a crash or corrupt the free list.
    char* raw_ptr1 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));
    raw_ptr<char> wrapped_ptr = raw_ptr1 + size;
    wrapped_ptr = nullptr;
    // We need to make two more allocations to turn the possible free list
    // corruption into an observable crash.
    char* raw_ptr2 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));
    char* raw_ptr3 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));

    // Similarly for operator+=.
    char* raw_ptr4 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));
    wrapped_ptr = raw_ptr4;
    wrapped_ptr += size;
    wrapped_ptr = nullptr;
    char* raw_ptr5 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));
    char* raw_ptr6 =
        reinterpret_cast<char*>(allocator.root()->Alloc(size, ""));

    allocator.root()->Free(raw_ptr1);
    allocator.root()->Free(raw_ptr2);
    allocator.root()->Free(raw_ptr3);
    allocator.root()->Free(raw_ptr4);
    allocator.root()->Free(raw_ptr5);
    allocator.root()->Free(raw_ptr6);
  }
}
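
// While at least one raw_ptr references an allocation, BackupRefPtr doesn't
// return the freed slot to the free list; it quarantines the slot until the
// last raw_ptr is released. The next test checks the quarantine accounting.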
TEST(BackupRefPtrImpl, QuarantinedBytes) {
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  uint64_t* raw_ptr1 = reinterpret_cast<uint64_t*>(
      allocator.root()->Alloc(sizeof(uint64_t), ""));
  raw_ptr<uint64_t> wrapped_ptr1 = raw_ptr1;
  EXPECT_EQ(allocator.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);

  // Memory should get quarantined.
  allocator.root()->Free(raw_ptr1);
  EXPECT_GT(allocator.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            1U);

  // A non-quarantined free should not affect
  // total_size_of_brp_quarantined_bytes.
  void* raw_ptr2 = allocator.root()->Alloc(sizeof(uint64_t), "");
  allocator.root()->Free(raw_ptr2);

  // Freeing quarantined memory should bring the size back down to zero.
  wrapped_ptr1 = nullptr;
  EXPECT_EQ(allocator.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);
}

void RunBackupRefPtrImplAdvanceTest(
    partition_alloc::PartitionAllocator& allocator,
    size_t requested_size) {
  raw_ptr<char> ptr =
      static_cast<char*>(allocator.root()->Alloc(requested_size, ""));
  ptr += 123;
  ptr -= 123;
  ptr += requested_size / 2;
  ptr += requested_size / 2;  // end-of-allocation address is ok
  EXPECT_DEATH_IF_SUPPORTED(ptr += 1, "");
  EXPECT_DEATH_IF_SUPPORTED(++ptr, "");
  ptr -= requested_size / 2;
  ptr -= requested_size / 2;
  EXPECT_DEATH_IF_SUPPORTED(ptr -= 1, "");
  EXPECT_DEATH_IF_SUPPORTED(--ptr, "");

  allocator.root()->Free(ptr);
}
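
// A minimal usage sketch for the helper above (mirroring the Advance test
// below): choose a power-of-two slot size and strip the allocator extras, so
// that |requested_size| fills its slot exactly and the boundary checks fire
// at precise offsets.
//
//   size_t requested_size =
//       allocator.root()->AdjustSizeForExtrasSubtract(/*slot_size=*/512);
//   RunBackupRefPtrImplAdvanceTest(allocator, requested_size);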

TEST(BackupRefPtrImpl, Advance) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);

  // This requires some internal PartitionAlloc knowledge, but for the test to
  // work well the allocation + extras have to fill out the entire slot. That's
  // because PartitionAlloc doesn't know the exact allocation size and bases
  // the guards on the slot size.
  //
  // A power of two is a safe choice for a slot size, then adjust it for
  // extras.
  size_t slot_size = 512;
  size_t requested_size =
      allocator.root()->AdjustSizeForExtrasSubtract(slot_size);
  // Verify that we're indeed filling up the slot.
  ASSERT_EQ(
      requested_size,
      allocator.root()->AllocationCapacityFromRequestedSize(requested_size));
  RunBackupRefPtrImplAdvanceTest(allocator, requested_size);

  // We don't have the same worry for single-slot spans, as PartitionAlloc
  // knows exactly where the allocation ends.
  size_t raw_size = 300003;
  ASSERT_GT(raw_size, partition_alloc::internal::MaxRegularSlotSpanSize());
  ASSERT_LE(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator, requested_size);

  // Same for direct map.
  raw_size = 1001001;
  ASSERT_GT(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator, requested_size);
}

#if defined(PA_REF_COUNT_CHECK_COOKIE)
TEST(BackupRefPtrImpl, ReinterpretCast) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);

  void* ptr = allocator.root()->Alloc(16, "");
  allocator.root()->Free(ptr);

  raw_ptr<void>* wrapped_ptr = reinterpret_cast<raw_ptr<void>*>(&ptr);
  // The reference count cookie check should detect that the allocation has
  // already been freed.
  EXPECT_DEATH_IF_SUPPORTED(*wrapped_ptr = nullptr, "");
}
#endif  // defined(PA_REF_COUNT_CHECK_COOKIE)

namespace {

// Install dangling raw_ptr handlers and restore them when going out of scope.
class ScopedInstallDanglingRawPtrChecks {
 public:
  ScopedInstallDanglingRawPtrChecks() {
    enabled_feature_list_.InitWithFeaturesAndParameters(
        {{features::kPartitionAllocDanglingPtr, {{"mode", "crash"}}}},
        {/* disabled_features */});
    old_detected_fn_ = partition_alloc::GetDanglingRawPtrDetectedFn();
    old_dereferenced_fn_ = partition_alloc::GetDanglingRawPtrReleasedFn();
    allocator::InstallDanglingRawPtrChecks();
  }
  ~ScopedInstallDanglingRawPtrChecks() {
    partition_alloc::SetDanglingRawPtrDetectedFn(old_detected_fn_);
    partition_alloc::SetDanglingRawPtrReleasedFn(old_dereferenced_fn_);
  }

 private:
  test::ScopedFeatureList enabled_feature_list_;
  partition_alloc::DanglingRawPtrDetectedFn* old_detected_fn_;
  partition_alloc::DanglingRawPtrReleasedFn* old_dereferenced_fn_;
};

}  // namespace
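
// Each test below stack-allocates a ScopedInstallDanglingRawPtrChecks; for
// its lifetime, freeing memory that a dangling-checked raw_ptr still points
// to is reported, and in the configured "crash" mode the report is fatal.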

TEST(BackupRefPtrImpl, RawPtrMayDangle) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator.root()->Alloc(16, "");
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr = ptr;
  allocator.root()->Free(ptr);  // No dangling raw_ptr reported.
  dangling_ptr = nullptr;       // No dangling raw_ptr reported.
}

TEST(BackupRefPtrImpl, RawPtrNotDangling) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator.root()->Alloc(16, "");
  raw_ptr<void> dangling_ptr = ptr;
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
  EXPECT_DEATH_IF_SUPPORTED(
      {
        allocator.root()->Free(ptr);  // Dangling raw_ptr detected.
        dangling_ptr = nullptr;       // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
}

// Check that the comparison operators work, even across raw_ptr with
// different dangling policies.
TEST(BackupRefPtrImpl, DanglingPtrComparison) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr_1 = allocator.root()->Alloc(16, "");
  void* ptr_2 = allocator.root()->Alloc(16, "");
  if (ptr_1 > ptr_2)
    std::swap(ptr_1, ptr_2);

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_1 = ptr_1;
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_2 = ptr_2;
  raw_ptr<void> not_dangling_ptr_1 = ptr_1;
  raw_ptr<void> not_dangling_ptr_2 = ptr_2;

  EXPECT_EQ(dangling_ptr_1, not_dangling_ptr_1);
  EXPECT_EQ(dangling_ptr_2, not_dangling_ptr_2);
  EXPECT_NE(dangling_ptr_1, not_dangling_ptr_2);
  EXPECT_NE(dangling_ptr_2, not_dangling_ptr_1);
  EXPECT_LT(dangling_ptr_1, not_dangling_ptr_2);
  EXPECT_GT(dangling_ptr_2, not_dangling_ptr_1);
  EXPECT_LT(not_dangling_ptr_1, dangling_ptr_2);
  EXPECT_GT(not_dangling_ptr_2, dangling_ptr_1);

  not_dangling_ptr_1 = nullptr;
  not_dangling_ptr_2 = nullptr;

  allocator.root()->Free(ptr_1);
  allocator.root()->Free(ptr_2);
}

// Check that the assignment operator works, even across raw_ptr with
// different dangling policies.
TEST(BackupRefPtrImpl, DanglingPtrAssignment) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator.root()->Alloc(16, "");

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_1;
  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_2;
  raw_ptr<void> not_dangling_ptr;

  dangling_ptr_1 = ptr;
  not_dangling_ptr = dangling_ptr_1;
  dangling_ptr_1 = nullptr;
  dangling_ptr_2 = not_dangling_ptr;
  not_dangling_ptr = nullptr;

  allocator.root()->Free(ptr);

  dangling_ptr_1 = dangling_ptr_2;
  dangling_ptr_2 = nullptr;
  not_dangling_ptr = dangling_ptr_1;
  dangling_ptr_1 = nullptr;
}

// Check that the copy constructor works, even across raw_ptr with different
// dangling policies.
TEST(BackupRefPtrImpl, DanglingPtrCopyConstructor) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator.root()->Alloc(16, "");

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_1(ptr);
  raw_ptr<void> not_dangling_ptr_1(ptr);

  raw_ptr<void, DisableDanglingPtrDetection> dangling_ptr_2(not_dangling_ptr_1);
  raw_ptr<void> not_dangling_ptr_2(dangling_ptr_1);

  not_dangling_ptr_1 = nullptr;
  not_dangling_ptr_2 = nullptr;

  allocator.root()->Free(ptr);
}

TEST(BackupRefPtrImpl, RawPtrExtractAsDangling) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator.root()->Alloc(sizeof(int), ""));
  allocator.root()->Free(
      ptr.ExtractAsDangling());  // No dangling raw_ptr reported.
  EXPECT_EQ(ptr, nullptr);
}

TEST(BackupRefPtrImpl, RawPtrDeleteWithoutExtractAsDangling) {
  // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
  // new/delete once PartitionAlloc Everywhere is fully enabled.
  partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  partition_alloc::PartitionAllocator allocator;
  allocator.init(kOpts);
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator.root()->Alloc(sizeof(int), ""));
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
  EXPECT_DEATH_IF_SUPPORTED(
      {
        allocator.root()->Free(ptr.get());  // Dangling raw_ptr detected.
        ptr = nullptr;                      // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
}

#endif  // BUILDFLAG(USE_BACKUP_REF_PTR) &&
        // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)

#if BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)

struct AsanStruct {
  int x;

  void func() { ++x; }
};

#define ASAN_BRP_PROTECTED(x) "MiraclePtr Status: PROTECTED\\n.*" x
#define ASAN_BRP_MANUAL_ANALYSIS(x) \
  "MiraclePtr Status: MANUAL ANALYSIS REQUIRED\\n.*" x
#define ASAN_BRP_NOT_PROTECTED(x) "MiraclePtr Status: NOT PROTECTED\\n.*" x

const char* kAsanBrpProtected_Dereference =
    ASAN_BRP_PROTECTED("dangling pointer was being dereferenced");
const char* kAsanBrpMaybeProtected_Extraction = ASAN_BRP_MANUAL_ANALYSIS(
    "pointer to the same region was extracted from a raw_ptr<T>");
const char* kAsanBrpNotProtected_Instantiation = ASAN_BRP_NOT_PROTECTED(
    "pointer to an already freed region was assigned to a raw_ptr<T>");
const char* kAsanBrpNotProtected_EarlyAllocation = ASAN_BRP_NOT_PROTECTED(
    "region was allocated before MiraclePtr was activated");
const char* kAsanBrpNotProtected_NoRawPtrAccess =
    ASAN_BRP_NOT_PROTECTED("No raw_ptr<T> access to this region was detected");

#undef ASAN_BRP_PROTECTED
#undef ASAN_BRP_MANUAL_ANALYSIS
#undef ASAN_BRP_NOT_PROTECTED
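
// The constants above are death-test regexes: each expands to the MiraclePtr
// status banner followed by a specific explanation, e.g.
// kAsanBrpProtected_Dereference matches
// "MiraclePtr Status: PROTECTED\n.*dangling pointer was being dereferenced".
//
// Each test below first ensures the BRP-ASan checks are active: if the
// RawPtrAsanService isn't already in kEnabled mode, it configures all three
// checks; otherwise it asserts that the relevant check is enabled.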

TEST(AsanBackupRefPtrImpl, Dereference) {
  if (RawPtrAsanService::GetInstance().mode() !=
      RawPtrAsanService::Mode::kEnabled) {
    base::RawPtrAsanService::GetInstance().Configure(
        base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
        base::EnableInstantiationCheck(true));
  } else {
    ASSERT_TRUE(
        base::RawPtrAsanService::GetInstance().is_dereference_check_enabled());
  }

  raw_ptr<AsanStruct> protected_ptr = new AsanStruct;

  // The four statements below should succeed.
  (*protected_ptr).x = 1;
  (*protected_ptr).func();
  ++(protected_ptr->x);
  protected_ptr->func();

  delete protected_ptr.get();

  EXPECT_DEATH_IF_SUPPORTED((*protected_ptr).x = 1,
                            kAsanBrpProtected_Dereference);
  EXPECT_DEATH_IF_SUPPORTED((*protected_ptr).func(),
                            kAsanBrpProtected_Dereference);
  EXPECT_DEATH_IF_SUPPORTED(++(protected_ptr->x),
                            kAsanBrpProtected_Dereference);
  EXPECT_DEATH_IF_SUPPORTED(protected_ptr->func(),
                            kAsanBrpProtected_Dereference);
}

TEST(AsanBackupRefPtrImpl, Extraction) {
  if (RawPtrAsanService::GetInstance().mode() !=
      RawPtrAsanService::Mode::kEnabled) {
    base::RawPtrAsanService::GetInstance().Configure(
        base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
        base::EnableInstantiationCheck(true));
  } else {
    ASSERT_TRUE(
        base::RawPtrAsanService::GetInstance().is_extraction_check_enabled());
  }

  raw_ptr<AsanStruct> protected_ptr = new AsanStruct;

  AsanStruct* ptr1 = protected_ptr;  // Shouldn't crash.
  ptr1->x = 0;

  delete protected_ptr.get();

  EXPECT_DEATH_IF_SUPPORTED(
      {
        AsanStruct* ptr2 = protected_ptr;
        ptr2->x = 1;
      },
      kAsanBrpMaybeProtected_Extraction);
}

TEST(AsanBackupRefPtrImpl, Instantiation) {
  if (RawPtrAsanService::GetInstance().mode() !=
      RawPtrAsanService::Mode::kEnabled) {
    base::RawPtrAsanService::GetInstance().Configure(
        base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
        base::EnableInstantiationCheck(true));
  } else {
    ASSERT_TRUE(base::RawPtrAsanService::GetInstance()
                    .is_instantiation_check_enabled());
  }

  AsanStruct* ptr = new AsanStruct;

  raw_ptr<AsanStruct> protected_ptr1 = ptr;  // Shouldn't crash.
  protected_ptr1 = nullptr;

  delete ptr;

  EXPECT_DEATH_IF_SUPPORTED(
      { [[maybe_unused]] raw_ptr<AsanStruct> protected_ptr2 = ptr; },
      kAsanBrpNotProtected_Instantiation);
}

TEST(AsanBackupRefPtrImpl, InstantiationInvalidPointer) {
  if (RawPtrAsanService::GetInstance().mode() !=
      RawPtrAsanService::Mode::kEnabled) {
    base::RawPtrAsanService::GetInstance().Configure(
        base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
        base::EnableInstantiationCheck(true));
  } else {
    ASSERT_TRUE(base::RawPtrAsanService::GetInstance()
                    .is_instantiation_check_enabled());
  }

  void* ptr1 = reinterpret_cast<void*>(0xfefefefefefefefe);
  [[maybe_unused]] raw_ptr<void> protected_ptr1 = ptr1;  // Shouldn't crash.

  size_t shadow_scale, shadow_offset;
  __asan_get_shadow_mapping(&shadow_scale, &shadow_offset);
  [[maybe_unused]] raw_ptr<void> protected_ptr2 =
      reinterpret_cast<void*>(shadow_offset);  // Shouldn't crash.
}

TEST(AsanBackupRefPtrImpl, UserPoisoned) {
  if (RawPtrAsanService::GetInstance().mode() !=
      RawPtrAsanService::Mode::kEnabled) {
    base::RawPtrAsanService::GetInstance().Configure(
        base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
        base::EnableInstantiationCheck(true));
  } else {
    ASSERT_TRUE(
        base::RawPtrAsanService::GetInstance().is_dereference_check_enabled());
  }

  AsanStruct* ptr = new AsanStruct;
  __asan_poison_memory_region(ptr, sizeof(AsanStruct));
  [[maybe_unused]] raw_ptr<AsanStruct> protected_ptr1 =
      ptr;  // Shouldn't crash.

  delete ptr;
  // Now that the region has actually been freed, instantiation should crash.
  EXPECT_DEATH_IF_SUPPORTED(
      { [[maybe_unused]] raw_ptr<AsanStruct> protected_ptr2 = ptr; },
      kAsanBrpNotProtected_Instantiation);
}

TEST(AsanBackupRefPtrImpl, EarlyAllocationDetection) {
  if (RawPtrAsanService::GetInstance().mode() ==
      RawPtrAsanService::Mode::kEnabled) {
    // There's no way to reset sanitizer allocator hooks and, consequently, to
    // reset BRP-ASan to the pre-startup state. Hence, exit early.
    return;
  }

  raw_ptr<AsanStruct> unsafe_ptr = new AsanStruct;

  base::RawPtrAsanService::GetInstance().Configure(
      base::EnableDereferenceCheck(true), base::EnableExtractionCheck(true),
      base::EnableInstantiationCheck(true));

  raw_ptr<AsanStruct> safe_ptr = new AsanStruct;

  EXPECT_FALSE(RawPtrAsanService::GetInstance().IsSupportedAllocation(
      unsafe_ptr.get()));
  EXPECT_TRUE(
      RawPtrAsanService::GetInstance().IsSupportedAllocation(safe_ptr.get()));

  delete safe_ptr.get();
  delete unsafe_ptr.get();

  EXPECT_FALSE(RawPtrAsanService::GetInstance().IsSupportedAllocation(
      unsafe_ptr.get()));
  EXPECT_TRUE(
      RawPtrAsanService::GetInstance().IsSupportedAllocation(safe_ptr.get()));

  EXPECT_DEATH_IF_SUPPORTED({ unsafe_ptr->func(); },
                            kAsanBrpNotProtected_EarlyAllocation);
  EXPECT_DEATH_IF_SUPPORTED({ safe_ptr->func(); },
                            kAsanBrpProtected_Dereference);
}

#endif  // BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)

#if defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

static constexpr size_t kTagOffsetForTest = 2;

struct MTECheckedPtrImplPartitionAllocSupportForTest {
  static bool EnabledForPtr(void* ptr) { return !!ptr; }
  static ALWAYS_INLINE void* TagPointer(uintptr_t ptr) {
    return reinterpret_cast<void*>(ptr - kTagOffsetForTest);
  }
};

using MTECheckedPtrImplForTest =
    MTECheckedPtrImpl<MTECheckedPtrImplPartitionAllocSupportForTest>;
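
// The support struct above stands in for PartitionAlloc in these tests: it
// reports tagging as enabled for any non-null pointer, and it locates the
// tag kTagOffsetForTest (2) bytes before the pointed-to address, which is
// where the fake allocations below store it.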

TEST(MTECheckedPtrImpl, WrapAndSafelyUnwrap) {
  // Create a fake allocation, with the first 2 bytes reserved for the tag.
  // It is ok to use a fake allocation, instead of PartitionAlloc, because
  // MTECheckedPtrImplForTest pretends the functionality is enabled for this
  // pointer and locates the tag appropriately.
  unsigned char bytes[] = {0xBA, 0x42, 0x78, 0x89};
  void* ptr = bytes + kTagOffsetForTest;
  ASSERT_EQ(0x78, *static_cast<char*>(ptr));
  uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);

  uintptr_t mask = 0xFFFFFFFFFFFFFFFF;
  if (sizeof(partition_alloc::PartitionTag) < 2)
    mask = 0x00FFFFFFFFFFFFFF;

  uintptr_t wrapped =
      reinterpret_cast<uintptr_t>(MTECheckedPtrImplForTest::WrapRawPtr(ptr));
  // The bytes before the allocation will be used as the tag (in reverse
  // order due to little-endianness).
  ASSERT_EQ(wrapped, (addr | 0x42BA000000000000) & mask);
  ASSERT_EQ(MTECheckedPtrImplForTest::SafelyUnwrapPtrForDereference(
                reinterpret_cast<void*>(wrapped)),
            ptr);

  // Modify the tag in the fake allocation.
  bytes[0] |= 0x40;
  wrapped =
      reinterpret_cast<uintptr_t>(MTECheckedPtrImplForTest::WrapRawPtr(ptr));
  ASSERT_EQ(wrapped, (addr | 0x42FA000000000000) & mask);
  ASSERT_EQ(MTECheckedPtrImplForTest::SafelyUnwrapPtrForDereference(
                reinterpret_cast<void*>(wrapped)),
            ptr);
}
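
// Worked example of the packing checked above: the two tag bytes
// {0xBA, 0x42} are read as the little-endian 16-bit value 0x42BA and placed
// in the top 16 bits of the wrapped pointer, i.e.
// wrapped = addr | (uintptr_t{0x42BA} << 48), truncated by |mask| when
// PartitionTag is narrower than 2 bytes.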

TEST(MTECheckedPtrImpl, SafelyUnwrapDisabled) {
  // Create a fake allocation, with the first 2 bytes reserved for the tag.
  // It is ok to use a fake allocation, instead of PartitionAlloc, because
  // MTECheckedPtrImplForTest pretends the functionality is enabled for this
  // pointer and locates the tag appropriately.
  unsigned char bytes[] = {0xBA, 0x42, 0x78, 0x89};
  unsigned char* ptr = bytes + kTagOffsetForTest;
  ASSERT_EQ(0x78, *ptr);
  ASSERT_EQ(MTECheckedPtrImplForTest::SafelyUnwrapPtrForDereference(ptr), ptr);
}

TEST(MTECheckedPtrImpl, CrashOnTagMismatch) {
  // Create a fake allocation, using the first two bytes for the tag.
  // It is ok to use a fake allocation, instead of PartitionAlloc, because
  // MTECheckedPtrImplForTest pretends the functionality is enabled for this
  // pointer and locates the tag appropriately.
  unsigned char bytes[] = {0xBA, 0x42, 0x78, 0x89};
  unsigned char* ptr =
      MTECheckedPtrImplForTest::WrapRawPtr(bytes + kTagOffsetForTest);
  EXPECT_EQ(*MTECheckedPtrImplForTest::SafelyUnwrapPtrForDereference(ptr),
            0x78);
  // Clobber the tag associated with the fake allocation.
  bytes[0] = 0;
  EXPECT_DEATH_IF_SUPPORTED(
      if (*MTECheckedPtrImplForTest::SafelyUnwrapPtrForDereference(ptr) ==
          0x78) return,
      "");
}

#if !defined(MEMORY_TOOL_REPLACES_ALLOCATOR) && \
    BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)

// This test works only when PartitionAlloc is used and tags are enabled.
// Don't enable it when MEMORY_TOOL_REPLACES_ALLOCATOR is defined, because that
// makes PartitionAlloc take a different path that doesn't provide tags, hence
// no crash on UaF, and the EXPECT_DEATH_IF_SUPPORTED expectation would be
// missed.
TEST(MTECheckedPtrImpl, CrashOnUseAfterFree) {
  int* unwrapped_ptr = new int;
  // Use the actual CheckedPtr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int> ptr = unwrapped_ptr;

  *ptr = 42;
  EXPECT_EQ(*ptr, 42);

  delete unwrapped_ptr;

  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *ptr, "");
}

TEST(MTECheckedPtrImpl, CrashOnUseAfterFree_WithOffset) {
  const uint8_t kSize = 100;
  uint8_t* unwrapped_ptr = new uint8_t[kSize];
  // Use the actual CheckedPtr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<uint8_t> ptrs[kSize];
  for (uint8_t i = 0; i < kSize; ++i) {
    ptrs[i] = unwrapped_ptr + i;
  }

  for (uint8_t i = 0; i < kSize; ++i) {
    *ptrs[i] = 42 + i;
    EXPECT_TRUE(*ptrs[i] == 42 + i);
  }

  delete[] unwrapped_ptr;

  for (uint8_t i = 0; i < kSize; i += 15) {
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *ptrs[i], "");
  }
}

TEST(MTECheckedPtrImpl, DirectMapCrashOnUseAfterFree) {
  // Alloc two super pages' worth of ints.
  constexpr size_t kIntsPerSuperPage =
      partition_alloc::kSuperPageSize / sizeof(int);
  constexpr size_t kAllocAmount = kIntsPerSuperPage * 2;
  int* unwrapped_ptr = new int[kAllocAmount];
  ASSERT_TRUE(partition_alloc::internal::IsManagedByDirectMap(
      partition_alloc::UntagPtr(unwrapped_ptr)));

  // Use the actual CheckedPtr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int> ptr = unwrapped_ptr;

  *ptr = 42;
  EXPECT_EQ(*ptr, 42);
  *(ptr + kIntsPerSuperPage) = 42;
  EXPECT_EQ(*(ptr + kIntsPerSuperPage), 42);
  *(ptr + kAllocAmount - 1) = 42;
  EXPECT_EQ(*(ptr + kAllocAmount - 1), 42);

  delete[] unwrapped_ptr;

  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *ptr, "");
}

TEST(MTECheckedPtrImpl, AdvancedPointerShiftedAppropriately) {
  uint64_t* unwrapped_ptr = new uint64_t[6];
  CountingRawPtr<uint64_t> ptr = unwrapped_ptr;

  // This is a non-fixture test, so we need to reset all counters manually.
  RawPtrCountingImpl::ClearCounters();

  // This is unwrapped, but still useful for ensuring that the shift is sized
  // in `uint64_t`s.
  auto original_addr = reinterpret_cast<uintptr_t>(ptr.get());
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
              }),
              CountingRawPtrHasCounts());

  ptr += 5;
  EXPECT_EQ(reinterpret_cast<uintptr_t>(ptr.get()) - original_addr,
            5 * sizeof(uint64_t));
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 2,
              }),
              CountingRawPtrHasCounts());

  delete[] unwrapped_ptr;

  EXPECT_DEATH_IF_SUPPORTED(*ptr, "");

  // We assert that no visible extraction actually took place.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 2,
              }),
              CountingRawPtrHasCounts());
}

#endif  // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR) &&
        // BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)

#endif  // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

}  // namespace internal
}  // namespace base