// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_MEMORY_RAW_PTR_H_
#define BASE_MEMORY_RAW_PTR_H_

#include <stddef.h>
#include <stdint.h>

#include <climits>
#include <cstddef>
#include <functional>
#include <type_traits>
#include <utility>

#include "base/allocator/buildflags.h"
#include "base/allocator/partition_allocator/partition_alloc_config.h"
#include "base/check.h"
#include "base/compiler_specific.h"
#include "base/dcheck_is_on.h"
#include "base/trace_event/base_tracing_forward.h"
#include "build/build_config.h"
#include "build/buildflag.h"
#if BUILDFLAG(USE_BACKUP_REF_PTR) || \
    defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)
// USE_BACKUP_REF_PTR implies USE_PARTITION_ALLOC, needed for code under
// allocator/partition_allocator/ to be built.
#include "base/allocator/partition_allocator/address_pool_manager_bitmap.h"
#include "base/allocator/partition_allocator/partition_address_space.h"
#include "base/allocator/partition_allocator/partition_alloc_constants.h"
#include "base/base_export.h"
#endif  // BUILDFLAG(USE_BACKUP_REF_PTR) ||
        // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

#if defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)
#include "base/allocator/partition_allocator/partition_tag.h"
#include "base/allocator/partition_allocator/partition_tag_types.h"
#include "base/allocator/partition_allocator/tagging.h"
#include "base/check_op.h"
#endif  // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

#if BUILDFLAG(IS_WIN)
#include "base/win/win_handle_types.h"
#endif
namespace cc {
class Scheduler;
}
namespace base::internal {
class DelayTimerBase;
}
namespace content::responsiveness {
class Calculator;
}

namespace base {

// NOTE: All methods should be `ALWAYS_INLINE`. raw_ptr is meant to be a
// lightweight replacement of a raw pointer, hence performance is critical.

namespace internal {
// These classes/structures are part of the raw_ptr implementation.
// DO NOT USE THESE CLASSES DIRECTLY YOURSELF.

// This type trait verifies a type can be used as a pointer offset.
//
// We support pointer offsets in signed (ptrdiff_t) or unsigned (size_t) values.
// Smaller types are also allowed.
template <typename Z>
static constexpr bool offset_type =
    std::is_integral_v<Z> && sizeof(Z) <= sizeof(ptrdiff_t);
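
// For illustration, a rough sketch of what this trait accepts: integral types
// no wider than ptrdiff_t pass, anything else is rejected. For example:
// ```
// static_assert(offset_type<int> && offset_type<size_t> &&
//               offset_type<ptrdiff_t>);
// static_assert(!offset_type<float>);  // Not integral.
// ```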
struct RawPtrNoOpImpl {
  // Wraps a pointer.
  template <typename T>
  static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
    return ptr;
  }

  // Notifies the allocator when a wrapped pointer is being removed or replaced.
  template <typename T>
  static ALWAYS_INLINE void ReleaseWrappedPtr(T*) {}

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function is allowed to crash on nullptr.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function must handle nullptr gracefully.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Unwraps the pointer, without making an assertion on whether memory was
  // freed or not.
  template <typename T>
  static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr To* Upcast(From* wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    // Note, this cast may change the address if upcasting to base that lies in
    // the middle of the derived object.
    return wrapped_ptr;
  }
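
  // For illustration, the address change mentioned above can happen with
  // multiple inheritance, e.g. (hypothetical types):
  // ```
  // struct A { int a; };
  // struct B { int b; };
  // struct C : A, B {};
  // C c;
  // B* b = &c;  // Typically &c plus an offset, not the same address as &c.
  // ```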

  // Advance the wrapped pointer by `delta_elems`.
  template <typename T,
            typename Z,
            typename = std::enable_if_t<offset_type<Z>, void>>
  static ALWAYS_INLINE T* Advance(T* wrapped_ptr, Z delta_elems) {
    return wrapped_ptr + delta_elems;
  }

  // Returns a copy of a wrapped pointer, without making an assertion on whether
  // memory was freed or not.
  template <typename T>
  static ALWAYS_INLINE T* Duplicate(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
  static ALWAYS_INLINE void IncrementLessCountForTest() {}
  static ALWAYS_INLINE void IncrementPointerToMemberOperatorCountForTest() {}
};

#if defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

constexpr int kValidAddressBits = 48;
constexpr uintptr_t kAddressMask = (1ull << kValidAddressBits) - 1;
constexpr int kTagBits = sizeof(uintptr_t) * 8 - kValidAddressBits;

// MTECheckedPtr has no business with the topmost bits reserved for the
// tag used by true ARM MTE, so we strip it out here.
constexpr uintptr_t kTagMask =
    ~kAddressMask & partition_alloc::internal::kPtrUntagMask;

constexpr int kTopBitShift = 63;
constexpr uintptr_t kTopBit = 1ull << kTopBitShift;
static_assert(kTopBit << 1 == 0, "kTopBit should really be the top bit");
static_assert((kTopBit & kTagMask) > 0,
              "kTopBit bit must be inside the tag region");

// This functionality is outside of MTECheckedPtrImpl, so that it can be
// overridden by tests.
struct MTECheckedPtrImplPartitionAllocSupport {
  // Checks if the necessary support is enabled in PartitionAlloc for `ptr`.
  template <typename T>
  static ALWAYS_INLINE bool EnabledForPtr(T* ptr) {
    // Disambiguation: UntagPtr removes the hardware MTE tag, whereas this
    // class is responsible for handling the software MTE tag.
    auto addr = partition_alloc::UntagPtr(ptr);
    return partition_alloc::IsManagedByPartitionAlloc(addr);
  }
  // Returns a pointer to the tag that protects the allocation pointed to by
  // |addr|.
  static ALWAYS_INLINE void* TagPointer(uintptr_t addr) {
    return partition_alloc::PartitionTagPointer(addr);
  }
};

template <typename PartitionAllocSupport>
struct MTECheckedPtrImpl {
  // This implementation assumes that pointers are 64 bits long and at least 16
  // top bits are unused. The latter is harder to verify statically, but this is
  // true for all currently supported 64-bit architectures (DCHECK when wrapping
  // will verify that).
  static_assert(sizeof(void*) >= 8, "Need 64-bit pointers");

  // Wraps a pointer, and returns its uintptr_t representation.
  template <typename T>
  static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
    // Disambiguation: UntagPtr removes the hardware MTE tag, whereas this
    // function is responsible for adding the software MTE tag.
    uintptr_t addr = partition_alloc::UntagPtr(ptr);
    DCHECK_EQ(ExtractTag(addr), 0ull);

    // Return a not-wrapped |addr|, if it's either nullptr or if the protection
    // for this pointer is disabled.
    if (!PartitionAllocSupport::EnabledForPtr(ptr)) {
      return ptr;
    }

    // Read the tag and place it in the top bits of the address.
    // Even if PartitionAlloc's tag has less than kTagBits, we'll read
    // what's given and pad the rest with 0s.
    static_assert(sizeof(partition_alloc::PartitionTag) * 8 <= kTagBits, "");
    uintptr_t tag = *(static_cast<volatile partition_alloc::PartitionTag*>(
        PartitionAllocSupport::TagPointer(addr)));
    DCHECK(tag);

    tag <<= kValidAddressBits;
    addr |= tag;

    // See the disambiguation comment above.
    // TODO(kdlee): Ensure that ptr's hardware MTE tag is preserved.
    // TODO(kdlee): Ensure that hardware and software MTE tags don't conflict.
    return static_cast<T*>(partition_alloc::internal::TagAddr(addr));
  }

  // Notifies the allocator when a wrapped pointer is being removed or replaced.
  // No-op for MTECheckedPtrImpl.
  template <typename T>
  static ALWAYS_INLINE void ReleaseWrappedPtr(T*) {}

  // Unwraps the pointer's uintptr_t representation, while asserting that memory
  // hasn't been freed. The function is allowed to crash on nullptr.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
    // Disambiguation: UntagPtr removes the hardware MTE tag, whereas this
    // function is responsible for removing the software MTE tag.
    uintptr_t wrapped_addr = partition_alloc::UntagPtr(wrapped_ptr);
    uintptr_t tag = ExtractTag(wrapped_addr);
    if (tag > 0) {
      // Read the tag provided by PartitionAlloc.
      //
      // Cast to volatile to ensure memory is read. E.g. in a tight loop, the
      // compiler could cache the value in a register and thus could miss that
      // another thread freed memory and changed tag.
      uintptr_t read_tag =
          *static_cast<volatile partition_alloc::PartitionTag*>(
              PartitionAllocSupport::TagPointer(ExtractAddress(wrapped_addr)));
      if (UNLIKELY(tag != read_tag))
        IMMEDIATE_CRASH();
      // See the disambiguation comment above.
      // TODO(kdlee): Ensure that ptr's hardware MTE tag is preserved.
      // TODO(kdlee): Ensure that hardware and software MTE tags don't conflict.
      return static_cast<T*>(
          partition_alloc::internal::TagAddr(ExtractAddress(wrapped_addr)));
    }
    return wrapped_ptr;
  }

  // Unwraps the pointer's uintptr_t representation, while asserting that memory
  // hasn't been freed. The function must handle nullptr gracefully.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
    // SafelyUnwrapPtrForDereference handles nullptr case well.
    return SafelyUnwrapPtrForDereference(wrapped_ptr);
  }

  // Unwraps the pointer's uintptr_t representation, without making an assertion
  // on whether memory was freed or not.
  template <typename T>
  static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
    return ExtractPtr(wrapped_ptr);
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr To* Upcast(From* wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    // The top-bit tag must not affect the result of upcast.
    return static_cast<To*>(wrapped_ptr);
  }

  // Advance the wrapped pointer by `delta_elems`.
  template <typename T,
            typename Z,
            typename = std::enable_if_t<offset_type<Z>, void>>
  static ALWAYS_INLINE T* Advance(T* wrapped_ptr, Z delta_elems) {
    return wrapped_ptr + delta_elems;
  }

  // Returns a copy of a wrapped pointer, without making an assertion
  // on whether memory was freed or not.
  template <typename T>
  static ALWAYS_INLINE T* Duplicate(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
  static ALWAYS_INLINE void IncrementLessCountForTest() {}
  static ALWAYS_INLINE void IncrementPointerToMemberOperatorCountForTest() {}

 private:
  static ALWAYS_INLINE uintptr_t ExtractAddress(uintptr_t wrapped_ptr) {
    return wrapped_ptr & kAddressMask;
  }

  template <typename T>
  static ALWAYS_INLINE T* ExtractPtr(T* wrapped_ptr) {
    // Disambiguation: UntagPtr/TagAddr handle the hardware MTE tag, whereas
    // this function is responsible for removing the software MTE tag.
    // TODO(kdlee): Ensure that wrapped_ptr's hardware MTE tag is preserved.
    // TODO(kdlee): Ensure that hardware and software MTE tags don't conflict.
    return static_cast<T*>(partition_alloc::internal::TagAddr(
        ExtractAddress(partition_alloc::UntagPtr(wrapped_ptr))));
  }

  static ALWAYS_INLINE uintptr_t ExtractTag(uintptr_t wrapped_ptr) {
    return (wrapped_ptr & kTagMask) >> kValidAddressBits;
  }
};

#endif  // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

#if BUILDFLAG(USE_BACKUP_REF_PTR)

#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
BASE_EXPORT void CheckThatAddressIsntWithinFirstPartitionPage(
    uintptr_t address);
#endif

template <bool AllowDangling = false>
struct BackupRefPtrImpl {
  // Note that `BackupRefPtrImpl` itself is not thread-safe. If multiple threads
  // modify the same smart pointer object without synchronization, a data race
  // will occur.

  static ALWAYS_INLINE bool IsSupportedAndNotNull(uintptr_t address) {
    // This covers the nullptr case, as address 0 is never in GigaCage.
    bool is_in_brp_pool =
        partition_alloc::IsManagedByPartitionAllocBRPPool(address);

    // There are many situations where the compiler can prove that
    // ReleaseWrappedPtr is called on a value that is always nullptr, but the
    // way the check above is written, the compiler can't prove that nullptr is
    // not managed by PartitionAlloc; and so the compiler has to emit a useless
    // check and dead code.
    // To avoid that without making the runtime check slower, explicitly promise
    // to the compiler that is_in_brp_pool will always be false for nullptr.
    //
    // This condition would look nicer and might also theoretically be nicer for
    // the optimizer if it was written as "if (!address) { ... }", but
    // LLVM currently has issues with optimizing that away properly; see:
    // https://bugs.llvm.org/show_bug.cgi?id=49403
    // https://reviews.llvm.org/D97848
    // https://chromium-review.googlesource.com/c/chromium/src/+/2727400/2/base/memory/checked_ptr.h#120
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    CHECK(address || !is_in_brp_pool);
#endif
#if HAS_BUILTIN(__builtin_assume)
    __builtin_assume(address || !is_in_brp_pool);
#endif

    // There may be pointers immediately after the allocation, e.g.
    //   {
    //     // Assume this allocation happens outside of PartitionAlloc.
    //     raw_ptr<T> ptr = new T[20];
    //     for (size_t i = 0; i < 20; i ++) { ptr++; }
    //   }
    //
    // Such pointers are *not* at risk of accidentally falling into BRP pool,
    // because:
    // 1) On 64-bit systems, BRP pool is preceded by a forbidden region.
    // 2) On 32-bit systems, the guard pages and metadata of super pages in BRP
    //    pool aren't considered to be part of that pool.
    //
    // This allows us to make a stronger assertion that if
    // IsManagedByPartitionAllocBRPPool returns true for a valid pointer,
    // it must be at least a partition page away from the beginning of a super
    // page.
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    if (is_in_brp_pool) {
      CheckThatAddressIsntWithinFirstPartitionPage(address);
    }
#endif

    return is_in_brp_pool;
  }

  // Wraps a pointer.
  template <typename T>
  static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
    uintptr_t address = partition_alloc::UntagPtr(ptr);
    if (IsSupportedAndNotNull(address)) {
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      CHECK(ptr != nullptr);
#endif
      AcquireInternal(address);
    }
#if !defined(PA_HAS_64_BITS_POINTERS)
    else {
      partition_alloc::internal::AddressPoolManagerBitmap::
          BanSuperPageFromBRPPool(address);
    }
#endif

    return ptr;
  }

  // Notifies the allocator when a wrapped pointer is being removed or replaced.
  template <typename T>
  static ALWAYS_INLINE void ReleaseWrappedPtr(T* wrapped_ptr) {
    uintptr_t address = partition_alloc::UntagPtr(wrapped_ptr);
    if (IsSupportedAndNotNull(address)) {
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      CHECK(wrapped_ptr != nullptr);
#endif
      ReleaseInternal(address);
    }
    // We are unable to counteract BanSuperPageFromBRPPool(), called from
    // WrapRawPtr(). We only use one bit per super-page and thus can't tell if
    // there's more than one associated raw_ptr<T> at a given time. The risk of
    // exhausting the entire address space is minuscule, therefore we couldn't
    // resist the perf gain of a single relaxed store (in the above-mentioned
    // function) over the much more expensive two CAS operations we'd have to
    // use if we were to un-ban a super-page.
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function is allowed to crash on nullptr.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    uintptr_t address = partition_alloc::UntagPtr(wrapped_ptr);
    if (IsSupportedAndNotNull(address)) {
      CHECK(wrapped_ptr != nullptr);
      CHECK(IsPointeeAlive(address));
    }
#endif
    return wrapped_ptr;
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function must handle nullptr gracefully.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Unwraps the pointer, without making an assertion on whether memory was
  // freed or not.
  template <typename T>
  static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr To* Upcast(From* wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    // Note, this cast may change the address if upcasting to base that lies in
    // the middle of the derived object.
    return wrapped_ptr;
  }

  // Advance the wrapped pointer by `delta_elems`.
  template <typename T,
            typename Z,
            typename = std::enable_if_t<offset_type<Z>, void>>
  static ALWAYS_INLINE T* Advance(T* wrapped_ptr, Z delta_elems) {
#if BUILDFLAG(PUT_REF_COUNT_IN_PREVIOUS_SLOT)
    // First check if the new address lands within the same allocation
    // (end-of-allocation address is ok too). It has a non-trivial cost, but
    // it's cheaper and more secure than the previous implementation that
    // rewrapped the pointer (wrapped the new pointer and unwrapped the old
    // one).
    uintptr_t address = partition_alloc::UntagPtr(wrapped_ptr);
    if (IsSupportedAndNotNull(address))
      CHECK(IsValidDelta(address, delta_elems * static_cast<Z>(sizeof(T))));
    return wrapped_ptr + delta_elems;
#else
    // In the "before allocation" mode, on 32-bit, we can run into a problem
    // that the end-of-allocation address could fall out of "GigaCage", if this
    // is the last slot of the super page, thus pointing to the guard page.
    // This means the ref-count won't be decreased when the pointer is released
    // (leak).
    //
    // We could possibly solve it in a few different ways:
    // - Add the trailing guard page to "GigaCage", but we'd have to think very
    //   hard if this doesn't create another hole.
    // - Add an address adjustment to the "GigaCage" check, similar to the one
    //   in PartitionAllocGetSlotStartInBRPPool(), but that seems fragile, not
    //   to mention adding an extra instruction to an inlined hot path.
    // - Let the leak happen, since it should be a very rare condition.
    // - Go back to the previous solution of rewrapping the pointer, but that
    //   had an issue of losing protection in case the pointer ever gets
    //   shifted before the end of allocation.
    //
    // We decided to cross that bridge once we get there... if we ever get
    // there. Currently there are no plans to switch back to the "before
    // allocation" mode.
    //
    // This problem doesn't exist in the "previous slot" mode, or any mode that
    // involves putting extras after the allocation, because the
    // end-of-allocation address belongs to the same slot.
    static_assert(false);
#endif
  }
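
  // For illustration, with this check in place in-bounds pointer arithmetic
  // stays legal (including the one-past-the-end address), while stepping
  // further would trip the IsValidDelta() CHECK above. A sketch, using a
  // hypothetical PartitionAlloc-backed array of 4 ints:
  // ```
  // raw_ptr<int> p = four_int_array;
  // p += 4;  // OK - one-past-the-end is still within the same slot.
  // p += 1;  // Would fail the CHECK in Advance().
  // ```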

  // Returns a copy of a wrapped pointer, without making an assertion on whether
  // memory was freed or not.
  // This method increments the reference count of the allocation slot.
  template <typename T>
  static ALWAYS_INLINE T* Duplicate(T* wrapped_ptr) {
    return WrapRawPtr(wrapped_ptr);
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
  static ALWAYS_INLINE void IncrementLessCountForTest() {}
  static ALWAYS_INLINE void IncrementPointerToMemberOperatorCountForTest() {}

 private:
  // We've evaluated several strategies (inline nothing, various parts, or
  // everything in |Wrap()| and |Release()|) using the Speedometer2 benchmark
  // to measure performance. The best results were obtained when only the
  // lightweight |IsManagedByPartitionAllocBRPPool()| check was inlined.
  // Therefore, we've extracted the rest into the functions below and marked
  // them as NOINLINE to prevent unintended LTO effects.
  static BASE_EXPORT NOINLINE void AcquireInternal(uintptr_t address);
  static BASE_EXPORT NOINLINE void ReleaseInternal(uintptr_t address);
  static BASE_EXPORT NOINLINE bool IsPointeeAlive(uintptr_t address);

  template <typename Z, typename = std::enable_if_t<offset_type<Z>, void>>
  static ALWAYS_INLINE bool IsValidDelta(uintptr_t address, Z delta_in_bytes) {
    if constexpr (std::is_signed_v<Z>)
      return IsValidSignedDelta(address, ptrdiff_t{delta_in_bytes});
    else
      return IsValidUnsignedDelta(address, size_t{delta_in_bytes});
  }
  static BASE_EXPORT NOINLINE bool IsValidSignedDelta(uintptr_t address,
                                                      ptrdiff_t delta_in_bytes);
  static BASE_EXPORT NOINLINE bool IsValidUnsignedDelta(uintptr_t address,
                                                        size_t delta_in_bytes);
};

#endif  // BUILDFLAG(USE_BACKUP_REF_PTR)

// Implementation that allows us to detect BackupRefPtr problems in ASan builds.
struct AsanBackupRefPtrImpl {
  // Wraps a pointer.
  template <typename T>
  static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
    AsanCheckIfValidInstantiation(ptr);
    return ptr;
  }

  // Notifies the allocator when a wrapped pointer is being removed or replaced.
  template <typename T>
  static ALWAYS_INLINE void ReleaseWrappedPtr(T*) {}

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function is allowed to crash on nullptr.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
    AsanCheckIfValidDereference(wrapped_ptr);
    return wrapped_ptr;
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function must handle nullptr gracefully.
  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
    AsanCheckIfValidExtraction(wrapped_ptr);
    return wrapped_ptr;
  }

  // Unwraps the pointer, without making an assertion on whether memory was
  // freed or not.
  template <typename T>
  static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  static ALWAYS_INLINE constexpr To* Upcast(From* wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    // Note, this cast may change the address if upcasting to base that lies in
    // the middle of the derived object.
    return wrapped_ptr;
  }

  // Advance the wrapped pointer by `delta_elems`.
  template <typename T,
            typename Z,
            typename = std::enable_if_t<offset_type<Z>, void>>
  static ALWAYS_INLINE T* Advance(T* wrapped_ptr, Z delta_elems) {
    return wrapped_ptr + delta_elems;
  }

  // Returns a copy of a wrapped pointer, without making an assertion on whether
  // memory was freed or not.
  template <typename T>
  static ALWAYS_INLINE T* Duplicate(T* wrapped_ptr) {
    return wrapped_ptr;
  }

  // This is for accounting only, used by unit tests.
  static ALWAYS_INLINE void IncrementSwapCountForTest() {}
  static ALWAYS_INLINE void IncrementLessCountForTest() {}
  static ALWAYS_INLINE void IncrementPointerToMemberOperatorCountForTest() {}

 private:
  static BASE_EXPORT NOINLINE void AsanCheckIfValidInstantiation(
      void const volatile* ptr);
  static BASE_EXPORT NOINLINE void AsanCheckIfValidDereference(
      void const volatile* ptr);
  static BASE_EXPORT NOINLINE void AsanCheckIfValidExtraction(
      void const volatile* ptr);
};

template <class Super>
struct RawPtrCountingImplWrapperForTest : public Super {
  template <typename T>
  static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
    ++wrap_raw_ptr_cnt;
    return Super::WrapRawPtr(ptr);
  }

  template <typename T>
  static ALWAYS_INLINE void ReleaseWrappedPtr(T* ptr) {
    ++release_wrapped_ptr_cnt;
    Super::ReleaseWrappedPtr(ptr);
  }

  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
    ++get_for_dereference_cnt;
    return Super::SafelyUnwrapPtrForDereference(wrapped_ptr);
  }

  template <typename T>
  static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
    ++get_for_extraction_cnt;
    return Super::SafelyUnwrapPtrForExtraction(wrapped_ptr);
  }

  template <typename T>
  static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
    ++get_for_comparison_cnt;
    return Super::UnsafelyUnwrapPtrForComparison(wrapped_ptr);
  }

  static ALWAYS_INLINE void IncrementSwapCountForTest() {
    ++wrapped_ptr_swap_cnt;
  }

  static ALWAYS_INLINE void IncrementLessCountForTest() {
    ++wrapped_ptr_less_cnt;
  }

  static ALWAYS_INLINE void IncrementPointerToMemberOperatorCountForTest() {
    ++pointer_to_member_operator_cnt;
  }

  static void ClearCounters() {
    wrap_raw_ptr_cnt = 0;
    release_wrapped_ptr_cnt = 0;
    get_for_dereference_cnt = 0;
    get_for_extraction_cnt = 0;
    get_for_comparison_cnt = 0;
    wrapped_ptr_swap_cnt = 0;
    wrapped_ptr_less_cnt = 0;
    pointer_to_member_operator_cnt = 0;
  }

  static inline int wrap_raw_ptr_cnt = INT_MIN;
  static inline int release_wrapped_ptr_cnt = INT_MIN;
  static inline int get_for_dereference_cnt = INT_MIN;
  static inline int get_for_extraction_cnt = INT_MIN;
  static inline int get_for_comparison_cnt = INT_MIN;
  static inline int wrapped_ptr_swap_cnt = INT_MIN;
  static inline int wrapped_ptr_less_cnt = INT_MIN;
  static inline int pointer_to_member_operator_cnt = INT_MIN;
};
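
// For illustration, a test can plug this wrapper in as the Impl parameter and
// then inspect the counters, roughly like this (hypothetical test body):
// ```
// using CountingImpl =
//     base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrImpl>;
// CountingImpl::ClearCounters();
// raw_ptr<int, CountingImpl> ptr = &some_int;
// *ptr = 42;
// EXPECT_EQ(1, CountingImpl::wrap_raw_ptr_cnt);
// EXPECT_EQ(1, CountingImpl::get_for_dereference_cnt);
// ```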

}  // namespace internal

namespace raw_ptr_traits {

// IsSupportedType<T>::value answers whether raw_ptr<T> 1) compiles and 2) is
// always safe at runtime. Templates that may end up using `raw_ptr<T>` should
// use IsSupportedType to ensure that raw_ptr is not used with unsupported
// types. As an example, see how base::internal::StorageTraits uses
// IsSupportedType as a condition for using base::internal::UnretainedWrapper
// (which has a `ptr_` field that will become `raw_ptr<T>` after the Big
// Rewrite).
template <typename T, typename SFINAE = void>
struct IsSupportedType {
  static constexpr bool value = true;
};
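
// For illustration, a template can use this trait to fall back to a plain
// pointer for unsupported pointee types, along these lines (hypothetical
// helper, not defined in this header):
// ```
// template <typename T>
// using PtrStorage =
//     std::conditional_t<IsSupportedType<T>::value, raw_ptr<T>, T*>;
// ```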

// raw_ptr<T> is not compatible with function pointer types. Also, they don't
// even need the raw_ptr protection, because they don't point to the heap.
template <typename T>
struct IsSupportedType<T, std::enable_if_t<std::is_function<T>::value>> {
  static constexpr bool value = false;
};

// This section excludes some types from raw_ptr<T> to prevent them from being
// used inside base::Unretained in performance-sensitive places. These were
// identified from sampling profiler data. See crbug.com/1287151 for more info.
template <>
struct IsSupportedType<cc::Scheduler> {
  static constexpr bool value = false;
};
template <>
struct IsSupportedType<base::internal::DelayTimerBase> {
  static constexpr bool value = false;
};
template <>
struct IsSupportedType<content::responsiveness::Calculator> {
  static constexpr bool value = false;
};

// IsRawPtrCountingImpl<T>::value answers whether T is a specialization of
// RawPtrCountingImplWrapperForTest, to know whether Impl is for testing
// purposes.
template <typename T>
struct IsRawPtrCountingImpl : std::false_type {};

template <typename T>
struct IsRawPtrCountingImpl<internal::RawPtrCountingImplWrapperForTest<T>>
    : std::true_type {};

#if __OBJC__
// raw_ptr<T> is not compatible with pointers to Objective-C classes for a
// multitude of reasons. They may fail to compile in many cases, and wouldn't
// work well with tagged pointers. Anyway, Objective-C objects have their own
// way of tracking lifespan, hence don't need the raw_ptr protection as much.
//
// Such pointers are detected by checking if they're convertible to |id| type.
template <typename T>
struct IsSupportedType<T,
                       std::enable_if_t<std::is_convertible<T*, id>::value>> {
  static constexpr bool value = false;
};
#endif  // __OBJC__

#if BUILDFLAG(IS_WIN)
// raw_ptr<HWND__> is unsafe at runtime - if the handle happens to also
// represent a valid pointer into a PartitionAlloc-managed region then it can
// lead to manipulating random memory when treating it as BackupRefPtr
// ref-count. See also https://crbug.com/1262017.
//
// TODO(https://crbug.com/1262017): Cover other handle types like HANDLE,
// HLOCAL, HINTERNET, or HDEVINFO. Maybe we should avoid using raw_ptr<T> when
// T=void (as is the case in these handle types). OTOH, explicit,
// non-template-based raw_ptr<void> should be allowed. Maybe this can be solved
// by having 2 traits: IsPointeeAlwaysSafe (to be used in templates) and
// IsPointeeUsuallySafe (to be used in the static_assert in raw_ptr). The
// upside of this approach is that it will safely handle base::Bind closing over
// HANDLE. The downside of this approach is that base::Bind closing over a
// void* pointer will not get UaF protection.
#define CHROME_WINDOWS_HANDLE_TYPE(name)   \
  template <>                              \
  struct IsSupportedType<name##__, void> { \
    static constexpr bool value = false;   \
  };
#include "base/win/win_handle_types_list.inc"
#undef CHROME_WINDOWS_HANDLE_TYPE
#endif
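
// For illustration, for the HWND entry in the included list the X-macro above
// expands to roughly:
// ```
// template <>
// struct IsSupportedType<HWND__, void> {
//   static constexpr bool value = false;
// };
// ```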

}  // namespace raw_ptr_traits

// `raw_ptr<T>` is a non-owning smart pointer that has improved memory-safety
// over raw pointers. It behaves just like a raw pointer on platforms where
// USE_BACKUP_REF_PTR is off, and almost like one when it's on (the main
// difference is that it's zero-initialized and cleared on destruction and
// move). Unlike `std::unique_ptr<T>`, `base::scoped_refptr<T>`, etc., it
// doesn't manage ownership or lifetime of an allocated object - you are still
// responsible for freeing the object when no longer used, just as you would
// with a raw C++ pointer.
//
// Compared to a raw C++ pointer, on platforms where USE_BACKUP_REF_PTR is on,
// `raw_ptr<T>` incurs additional performance overhead for initialization,
// destruction, and assignment (including `ptr++` and `ptr += ...`). There is
// no overhead when dereferencing a pointer.
//
// `raw_ptr<T>` is beneficial for security, because it can prevent a significant
// percentage of Use-after-Free (UaF) bugs from being exploitable. `raw_ptr<T>`
// has limited impact on stability - dereferencing a dangling pointer remains
// Undefined Behavior. Note that the security protection is not yet enabled by
// default.
//
// raw_ptr<T> is marked as [[gsl::Pointer]] which allows the compiler to catch
// some bugs where the raw_ptr holds a dangling pointer to a temporary object.
// However the [[gsl::Pointer]] analysis expects that such types do not have a
// non-default move constructor/assignment. Thus, it's possible to get an error
// where the pointer is not actually dangling, and you may have to work around
// the compiler. We have not managed to construct such an example in Chromium
// yet.
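//
// For illustration, typical usage mirrors a plain pointer field. A minimal
// sketch with a hypothetical class:
// ```
// class PageView {
//  public:
//   explicit PageView(Frame* frame) : frame_(frame) {}
//   void Paint() { frame_->Draw(); }  // Dereferences like a raw pointer.
//
//  private:
//   // Non-owning; the pointee must be freed elsewhere, and must not be
//   // dereferenced through |frame_| after it is freed.
//   raw_ptr<Frame> frame_;
// };
// ```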
#if BUILDFLAG(USE_BACKUP_REF_PTR)
using RawPtrMayDangle = internal::BackupRefPtrImpl</*AllowDangling=*/true>;
using RawPtrBanDanglingIfSupported =
    internal::BackupRefPtrImpl</*AllowDangling=*/false>;
#elif BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
using RawPtrMayDangle = internal::AsanBackupRefPtrImpl;
using RawPtrBanDanglingIfSupported = internal::AsanBackupRefPtrImpl;
#elif defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)
using RawPtrMayDangle = internal::MTECheckedPtrImpl<
    internal::MTECheckedPtrImplPartitionAllocSupport>;
using RawPtrBanDanglingIfSupported = internal::MTECheckedPtrImpl<
    internal::MTECheckedPtrImplPartitionAllocSupport>;
#else
using RawPtrMayDangle = internal::RawPtrNoOpImpl;
using RawPtrBanDanglingIfSupported = internal::RawPtrNoOpImpl;
#endif

using DefaultRawPtrImpl = RawPtrBanDanglingIfSupported;

template <typename T, typename Impl = DefaultRawPtrImpl>
class TRIVIAL_ABI GSL_POINTER raw_ptr {
  using DanglingRawPtr = std::conditional_t<
      raw_ptr_traits::IsRawPtrCountingImpl<Impl>::value,
      raw_ptr<T, internal::RawPtrCountingImplWrapperForTest<RawPtrMayDangle>>,
      raw_ptr<T, RawPtrMayDangle>>;

 public:
  static_assert(raw_ptr_traits::IsSupportedType<T>::value,
                "raw_ptr<T> doesn't work with this kind of pointee type T");

#if BUILDFLAG(USE_BACKUP_REF_PTR)
  // BackupRefPtr requires a non-trivial default constructor, destructor, etc.
  constexpr ALWAYS_INLINE raw_ptr() noexcept : wrapped_ptr_(nullptr) {}

  ALWAYS_INLINE raw_ptr(const raw_ptr& p) noexcept
      : wrapped_ptr_(Impl::Duplicate(p.wrapped_ptr_)) {}

  ALWAYS_INLINE raw_ptr(raw_ptr&& p) noexcept {
    wrapped_ptr_ = p.wrapped_ptr_;
    p.wrapped_ptr_ = nullptr;
  }

  ALWAYS_INLINE raw_ptr& operator=(const raw_ptr& p) noexcept {
    // Duplicate before releasing, in case the pointer is assigned to itself.
    //
    // Unlike the move version of this operator, don't add |this != &p| branch,
    // for performance reasons. Even though Duplicate() is not cheap, we
    // practically never assign a raw_ptr<T> to itself. We suspect that a
    // cumulative cost of a conditional branch, even if always correctly
    // predicted, would exceed that.
    T* new_ptr = Impl::Duplicate(p.wrapped_ptr_);
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = new_ptr;
    return *this;
  }

  ALWAYS_INLINE raw_ptr& operator=(raw_ptr&& p) noexcept {
    // Unlike the copy version of this operator, this branch is necessary for
    // correctness.
    if (LIKELY(this != &p)) {
      Impl::ReleaseWrappedPtr(wrapped_ptr_);
      wrapped_ptr_ = p.wrapped_ptr_;
      p.wrapped_ptr_ = nullptr;
    }
    return *this;
  }

  ALWAYS_INLINE ~raw_ptr() noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    // Work around external issues where raw_ptr is used after destruction.
    wrapped_ptr_ = nullptr;
  }

#else  // BUILDFLAG(USE_BACKUP_REF_PTR)

  // raw_ptr can be trivially default constructed (leaving |wrapped_ptr_|
  // uninitialized). This is needed for compatibility with raw pointers.
  //
  // TODO(lukasza): Always initialize |wrapped_ptr_|. Fix resulting build
  // errors. Analyze performance impact.
  constexpr ALWAYS_INLINE raw_ptr() noexcept = default;

  // In addition to nullptr_t ctor above, raw_ptr needs to have these
  // as |=default| or |constexpr| to avoid hitting -Wglobal-constructors in
  // cases like this:
  //     struct SomeStruct { int int_field; raw_ptr<int> ptr_field; };
  //     SomeStruct g_global_var = { 123, nullptr };
  ALWAYS_INLINE raw_ptr(const raw_ptr&) noexcept = default;
  ALWAYS_INLINE raw_ptr(raw_ptr&&) noexcept = default;
  ALWAYS_INLINE raw_ptr& operator=(const raw_ptr&) noexcept = default;
  ALWAYS_INLINE raw_ptr& operator=(raw_ptr&&) noexcept = default;

  ALWAYS_INLINE ~raw_ptr() noexcept = default;

#endif  // BUILDFLAG(USE_BACKUP_REF_PTR)

  // Deliberately implicit, because raw_ptr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(google-explicit-constructor)
  constexpr ALWAYS_INLINE raw_ptr(std::nullptr_t) noexcept
      : wrapped_ptr_(nullptr) {}

  // Deliberately implicit, because raw_ptr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(google-explicit-constructor)
  ALWAYS_INLINE raw_ptr(T* p) noexcept : wrapped_ptr_(Impl::WrapRawPtr(p)) {}

  // Deliberately implicit in order to support implicit upcast.
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  // NOLINTNEXTLINE(google-explicit-constructor)
  ALWAYS_INLINE raw_ptr(const raw_ptr<U, Impl>& ptr) noexcept
      : wrapped_ptr_(
            Impl::Duplicate(Impl::template Upcast<T, U>(ptr.wrapped_ptr_))) {}

  // Deliberately implicit in order to support implicit upcast.
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  // NOLINTNEXTLINE(google-explicit-constructor)
  ALWAYS_INLINE raw_ptr(raw_ptr<U, Impl>&& ptr) noexcept
      : wrapped_ptr_(Impl::template Upcast<T, U>(ptr.wrapped_ptr_)) {
#if BUILDFLAG(USE_BACKUP_REF_PTR)
    ptr.wrapped_ptr_ = nullptr;
#endif
  }

  ALWAYS_INLINE raw_ptr& operator=(std::nullptr_t) noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = nullptr;
    return *this;
  }

  ALWAYS_INLINE raw_ptr& operator=(T* p) noexcept {
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = Impl::WrapRawPtr(p);
    return *this;
  }

  // Upcast assignment
  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  ALWAYS_INLINE raw_ptr& operator=(const raw_ptr<U, Impl>& ptr) noexcept {
    // Make sure that pointer isn't assigned to itself (look at pointer address,
    // not its value).
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    CHECK(reinterpret_cast<uintptr_t>(this) !=
          reinterpret_cast<uintptr_t>(&ptr));
#endif
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ =
        Impl::Duplicate(Impl::template Upcast<T, U>(ptr.wrapped_ptr_));
    return *this;
  }

  template <typename U,
            typename Unused = std::enable_if_t<
                std::is_convertible<U*, T*>::value &&
                !std::is_void<typename std::remove_cv<T>::type>::value>>
  ALWAYS_INLINE raw_ptr& operator=(raw_ptr<U, Impl>&& ptr) noexcept {
    // Make sure that pointer isn't assigned to itself (look at pointer address,
    // not its value).
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    CHECK(reinterpret_cast<uintptr_t>(this) !=
          reinterpret_cast<uintptr_t>(&ptr));
#endif
    Impl::ReleaseWrappedPtr(wrapped_ptr_);
    wrapped_ptr_ = Impl::template Upcast<T, U>(ptr.wrapped_ptr_);
#if BUILDFLAG(USE_BACKUP_REF_PTR)
    ptr.wrapped_ptr_ = nullptr;
#endif
    return *this;
  }

  // Avoid using. The goal of raw_ptr is to be as close to raw pointer as
  // possible, so use it only if absolutely necessary (e.g. for const_cast).
  ALWAYS_INLINE T* get() const { return GetForExtraction(); }

  explicit ALWAYS_INLINE operator bool() const { return !!wrapped_ptr_; }

  template <typename U = T,
            typename Unused = std::enable_if_t<
                !std::is_void<typename std::remove_cv<U>::type>::value>>
  ALWAYS_INLINE U& operator*() const {
    return *GetForDereference();
  }
  ALWAYS_INLINE T* operator->() const { return GetForDereference(); }

  // Disables `(my_raw_ptr->*pmf)(...)` as a workaround for
  // the ICE in GCC parsing the code, reported at
  // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=103455
  template <typename PMF>
  void operator->*(PMF) const = delete;

  // Deliberately implicit, because raw_ptr is supposed to resemble raw ptr.
  // NOLINTNEXTLINE(runtime/explicit)
  ALWAYS_INLINE operator T*() const { return GetForExtraction(); }
  template <typename U>
  explicit ALWAYS_INLINE operator U*() const {
    // This operator may be invoked from static_cast, meaning the types may not
    // be implicitly convertible, hence the need for static_cast here.
    return static_cast<U*>(GetForExtraction());
  }

  ALWAYS_INLINE raw_ptr& operator++() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, 1);
    return *this;
  }
  ALWAYS_INLINE raw_ptr& operator--() {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, -1);
    return *this;
  }
  ALWAYS_INLINE raw_ptr operator++(int /* post_increment */) {
    raw_ptr result = *this;
    ++(*this);
    return result;
  }
  ALWAYS_INLINE raw_ptr operator--(int /* post_decrement */) {
    raw_ptr result = *this;
    --(*this);
    return result;
  }
  template <typename Z,
            typename = std::enable_if_t<internal::offset_type<Z>, void>>
  ALWAYS_INLINE raw_ptr& operator+=(Z delta_elems) {
    wrapped_ptr_ = Impl::Advance(wrapped_ptr_, delta_elems);
    return *this;
  }
  template <typename Z,
            typename = std::enable_if_t<internal::offset_type<Z>, void>>
  ALWAYS_INLINE raw_ptr& operator-=(Z delta_elems) {
    return *this += -delta_elems;
  }

  // Stop referencing the underlying pointer and free its memory. Compared to
  // raw delete calls, this avoids the raw_ptr being temporarily dangling
  // during the free operation, which would lead to taking the slower path
  // that involves quarantine.
  ALWAYS_INLINE void ClearAndDelete() noexcept {
    delete GetForExtractionAndReset();
  }
  ALWAYS_INLINE void ClearAndDeleteArray() noexcept {
    delete[] GetForExtractionAndReset();
  }
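
  // For illustration, this replaces the error-prone two-step pattern. A
  // sketch, assuming |ptr| is the only reference and owns its pointee:
  // ```
  // delete ptr.get();  // |ptr| dangles here until the next line.
  // ptr = nullptr;
  //
  // ptr.ClearAndDelete();  // Equivalent, but |ptr| never dangles.
  // ```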

  // Clear the underlying pointer and return another raw_ptr instance
  // that is allowed to dangle.
  // This can be useful in cases such as:
  // ```
  //  ptr.ExtractAsDangling()->SelfDestroy();
  // ```
  // ```
  //  c_style_api_do_something_and_destroy(ptr.ExtractAsDangling());
  // ```
  // NOTE, avoid using this method as it indicates an error-prone memory
  // ownership pattern. If possible, use smart pointers like std::unique_ptr<>
  // instead of raw_ptr<>.
  // If you have to use it, avoid saving the return value in a long-lived
  // variable (or worse, a field)! It's meant to be used as a temporary, to be
  // passed into a cleanup & freeing function, and destructed at the end of the
  // statement.
  ALWAYS_INLINE DanglingRawPtr ExtractAsDangling() noexcept {
    if constexpr (std::is_same_v<
                      typename std::remove_reference<decltype(*this)>::type,
                      DanglingRawPtr>) {
      DanglingRawPtr res(std::move(*this));
      // Not all implementations clear the source pointer on move, so do it
      // here just in case. Should be cheap.
      operator=(nullptr);
      return res;
    } else {
      T* ptr = GetForExtraction();
      DanglingRawPtr res(ptr);
      operator=(nullptr);
      return res;
    }
  }

  // Comparison operators between raw_ptr and raw_ptr<U>/U*/std::nullptr_t.
  // Strictly speaking, it is not necessary to provide these: the compiler can
  // use the conversion operator implicitly to allow comparisons to fall back to
  // comparisons between raw pointers. However, `operator T*`/`operator U*` may
  // perform safety checks with a higher runtime cost, so to avoid this, provide
  // explicit comparison operators for all combinations of parameters.

  // Comparisons between `raw_ptr`s. This unusual declaration and separate
  // definition below is because `GetForComparison()` is a private method. The
  // more conventional approach of defining a comparison operator between
  // `raw_ptr` and `raw_ptr<U>` in the friend declaration itself does not work,
  // because a comparison operator defined inline would not be allowed to call
  // `raw_ptr<U>`'s private `GetForComparison()` method.
  template <typename U, typename V, typename I>
  friend ALWAYS_INLINE bool operator==(const raw_ptr<U, I>& lhs,
                                       const raw_ptr<V, I>& rhs);
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const raw_ptr& lhs,
                                       const raw_ptr<U, Impl>& rhs) {
    return !(lhs == rhs);
  }
  template <typename U, typename V, typename I>
  friend ALWAYS_INLINE bool operator<(const raw_ptr<U, I>& lhs,
                                      const raw_ptr<V, I>& rhs);
  template <typename U, typename V, typename I>
  friend ALWAYS_INLINE bool operator>(const raw_ptr<U, I>& lhs,
                                      const raw_ptr<V, I>& rhs);
  template <typename U, typename V, typename I>
  friend ALWAYS_INLINE bool operator<=(const raw_ptr<U, I>& lhs,
                                       const raw_ptr<V, I>& rhs);
  template <typename U, typename V, typename I>
  friend ALWAYS_INLINE bool operator>=(const raw_ptr<U, I>& lhs,
                                       const raw_ptr<V, I>& rhs);

  // Comparisons with U*. These operators also handle the case where the RHS
  // is T*.
  template <typename U>
  friend ALWAYS_INLINE bool operator==(const raw_ptr& lhs, U* rhs) {
    return lhs.GetForComparison() == rhs;
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(const raw_ptr& lhs, U* rhs) {
    return !(lhs == rhs);
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator==(U* lhs, const raw_ptr& rhs) {
    return rhs == lhs;  // Reverse order to call the operator above.
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator!=(U* lhs, const raw_ptr& rhs) {
    return rhs != lhs;  // Reverse order to call the operator above.
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator<(const raw_ptr& lhs, U* rhs) {
    return lhs.GetForComparison() < rhs;
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator<=(const raw_ptr& lhs, U* rhs) {
    return lhs.GetForComparison() <= rhs;
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator>(const raw_ptr& lhs, U* rhs) {
    return lhs.GetForComparison() > rhs;
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator>=(const raw_ptr& lhs, U* rhs) {
    return lhs.GetForComparison() >= rhs;
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator<(U* lhs, const raw_ptr& rhs) {
    return lhs < rhs.GetForComparison();
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator<=(U* lhs, const raw_ptr& rhs) {
    return lhs <= rhs.GetForComparison();
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator>(U* lhs, const raw_ptr& rhs) {
    return lhs > rhs.GetForComparison();
  }
  template <typename U>
  friend ALWAYS_INLINE bool operator>=(U* lhs, const raw_ptr& rhs) {
    return lhs >= rhs.GetForComparison();
  }

  // Comparisons with `std::nullptr_t`.
  friend ALWAYS_INLINE bool operator==(const raw_ptr& lhs, std::nullptr_t) {
    return !lhs;
  }
  friend ALWAYS_INLINE bool operator!=(const raw_ptr& lhs, std::nullptr_t) {
    return !!lhs;  // Use !! otherwise the costly implicit cast will be used.
  }
  friend ALWAYS_INLINE bool operator==(std::nullptr_t, const raw_ptr& rhs) {
    return !rhs;
  }
  friend ALWAYS_INLINE bool operator!=(std::nullptr_t, const raw_ptr& rhs) {
    return !!rhs;  // Use !! otherwise the costly implicit cast will be used.
  }

  friend ALWAYS_INLINE void swap(raw_ptr& lhs, raw_ptr& rhs) noexcept {
    Impl::IncrementSwapCountForTest();
    std::swap(lhs.wrapped_ptr_, rhs.wrapped_ptr_);
  }

  // If T can be serialised into trace, its alias is also serialisable.
  template <class U = T>
  typename perfetto::check_traced_value_support<U>::type WriteIntoTrace(
      perfetto::TracedValue&& context) const {
    perfetto::WriteIntoTracedValue(std::move(context), get());
  }

 private:
  // This getter is meant for situations where the pointer is meant to be
  // dereferenced. It is allowed to crash on nullptr (it may or may not),
  // because it knows that the caller will crash on nullptr.
  ALWAYS_INLINE T* GetForDereference() const {
    return Impl::SafelyUnwrapPtrForDereference(wrapped_ptr_);
  }
  // This getter is meant for situations where the raw pointer is meant to be
  // extracted outside of this class, but not necessarily with an intention to
  // dereference. It mustn't crash on nullptr.
  ALWAYS_INLINE T* GetForExtraction() const {
    return Impl::SafelyUnwrapPtrForExtraction(wrapped_ptr_);
  }
  // This getter is meant *only* for situations where the pointer is meant to
  // be compared (guaranteeing no dereference or extraction outside of this
  // class). Any verifications can and should be skipped for performance
  // reasons.
  ALWAYS_INLINE T* GetForComparison() const {
    return Impl::UnsafelyUnwrapPtrForComparison(wrapped_ptr_);
  }

  ALWAYS_INLINE T* GetForExtractionAndReset() {
    T* ptr = GetForExtraction();
    operator=(nullptr);
    return ptr;
  }

  T* wrapped_ptr_;

  template <typename U, typename V>
  friend class raw_ptr;
};

template <typename U, typename V, typename I>
ALWAYS_INLINE bool operator==(const raw_ptr<U, I>& lhs,
                              const raw_ptr<V, I>& rhs) {
  return lhs.GetForComparison() == rhs.GetForComparison();
}
template <typename U, typename V, typename I>
ALWAYS_INLINE bool operator<(const raw_ptr<U, I>& lhs,
                             const raw_ptr<V, I>& rhs) {
  return lhs.GetForComparison() < rhs.GetForComparison();
}
template <typename U, typename V, typename I>
ALWAYS_INLINE bool operator>(const raw_ptr<U, I>& lhs,
                             const raw_ptr<V, I>& rhs) {
  return lhs.GetForComparison() > rhs.GetForComparison();
}
template <typename U, typename V, typename I>
ALWAYS_INLINE bool operator<=(const raw_ptr<U, I>& lhs,
                              const raw_ptr<V, I>& rhs) {
  return lhs.GetForComparison() <= rhs.GetForComparison();
}
template <typename U, typename V, typename I>
ALWAYS_INLINE bool operator>=(const raw_ptr<U, I>& lhs,
                              const raw_ptr<V, I>& rhs) {
  return lhs.GetForComparison() >= rhs.GetForComparison();
}

// Template helpers for working with T* or raw_ptr<T>.
template <typename T>
struct IsPointer : std::false_type {};

template <typename T>
struct IsPointer<T*> : std::true_type {};

template <typename T, typename I>
struct IsPointer<raw_ptr<T, I>> : std::true_type {};

template <typename T>
inline constexpr bool IsPointerV = IsPointer<T>::value;

template <typename T>
struct RemovePointer {
  using type = T;
};

template <typename T>
struct RemovePointer<T*> {
  using type = T;
};

template <typename T, typename I>
struct RemovePointer<raw_ptr<T, I>> {
  using type = T;
};

template <typename T>
using RemovePointerT = typename RemovePointer<T>::type;
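
// For illustration, these helpers treat raw_ptr<T> and T* uniformly, e.g.:
// ```
// static_assert(IsPointerV<int*> && IsPointerV<raw_ptr<int>>);
// static_assert(!IsPointerV<int>);
// static_assert(std::is_same_v<RemovePointerT<raw_ptr<int>>, int>);
// ```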

}  // namespace base

using base::raw_ptr;

// DisableDanglingPtrDetection option for raw_ptr annotates
// "intentional-and-safe" dangling pointers. It is meant to be used at the
// margin, only if there is no better way to re-architect the code.
//
// Usage:
//   raw_ptr<T, DisableDanglingPtrDetection> dangling_ptr;
//
// When using it, please provide a justification explaining why it is
// guaranteed that the pointer will never be dereferenced after becoming
// dangling.
using DisableDanglingPtrDetection = base::RawPtrMayDangle;

// See `docs/dangling_ptr.md`
// Annotates known dangling raw_ptr. Those haven't been triaged yet. All the
// occurrences are meant to be removed. See https://crbug.com/1291138.
using DanglingUntriaged = DisableDanglingPtrDetection;

// The following template parameters are only meaningful when `raw_ptr`
// is `MTECheckedPtr` (never the case unless a particular GN arg is set
// to true). `raw_ptr` users need not worry about this and can refer solely
// to `DisableDanglingPtrDetection` and `DanglingUntriaged` above.
//
// The `raw_ptr` definition allows users to specify an implementation.
// When `MTECheckedPtr` is in play, we need to augment this
// implementation setting with another layer that allows the `raw_ptr`
// to degrade into the no-op version.
#if defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)

// Direct pass-through to no-op implementation.
using DegradeToNoOpWhenMTE = base::internal::RawPtrNoOpImpl;

// As above, but with the "untriaged dangling" annotation.
using DanglingUntriagedDegradeToNoOpWhenMTE = base::internal::RawPtrNoOpImpl;

// As above, but with the "explicitly disable protection" annotation.
using DisableDanglingPtrDetectionDegradeToNoOpWhenMTE =
    base::internal::RawPtrNoOpImpl;

#else

// Direct pass-through to default implementation specified by `raw_ptr`
// template.
using DegradeToNoOpWhenMTE = base::RawPtrBanDanglingIfSupported;

// Direct pass-through to `DanglingUntriaged`.
using DanglingUntriagedDegradeToNoOpWhenMTE = DanglingUntriaged;

// Direct pass-through to `DisableDanglingPtrDetection`.
using DisableDanglingPtrDetectionDegradeToNoOpWhenMTE =
    DisableDanglingPtrDetection;

#endif  // defined(PA_USE_MTE_CHECKED_PTR_WITH_64_BITS_POINTERS)
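
// For illustration, a field that should degrade to the no-op implementation in
// MTECheckedPtr builds, but keep the default protection otherwise, can be
// declared roughly as:
// ```
// raw_ptr<T, DegradeToNoOpWhenMTE> field_;
// ```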

namespace std {

// Override so set/map lookups do not create extra raw_ptr. This also allows
// dangling pointers to be used for lookup.
template <typename T, typename Impl>
struct less<raw_ptr<T, Impl>> {
  using is_transparent = void;

  bool operator()(const raw_ptr<T, Impl>& lhs,
                  const raw_ptr<T, Impl>& rhs) const {
    Impl::IncrementLessCountForTest();
    return lhs < rhs;
  }

  bool operator()(T* lhs, const raw_ptr<T, Impl>& rhs) const {
    Impl::IncrementLessCountForTest();
    return lhs < rhs;
  }

  bool operator()(const raw_ptr<T, Impl>& lhs, T* rhs) const {
    Impl::IncrementLessCountForTest();
    return lhs < rhs;
  }
};

}  // namespace std
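
// For illustration, the transparent comparator above lets a container keyed by
// raw_ptr be probed with a plain T* without constructing a temporary raw_ptr.
// A sketch with hypothetical variables:
// ```
// std::set<raw_ptr<Widget>> widgets;
// Widget* w = ...;
// auto it = widgets.find(w);  // Heterogeneous lookup; no raw_ptr created.
// ```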

#endif  // BASE_MEMORY_RAW_PTR_H_