atomic-arch-fallback.h

// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef arch_xchg_relaxed
#define arch_xchg_relaxed arch_xchg
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_relaxed arch_cmpxchg
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */
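
/*
 * A note on the wrappers used above (a sketch, assuming the mainline
 * <linux/atomic.h> definitions): __atomic_op_acquire() builds the acquire
 * form of an operation from its _relaxed form by appending an acquire
 * fence, roughly:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 *
 * __atomic_op_release() instead issues __atomic_release_fence() before the
 * _relaxed op, and __atomic_op_fence() brackets the _relaxed op with full
 * fences. The same three shapes recur as explicit inline functions
 * throughout the rest of this file.
 */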
#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define arch_atomic_set_release arch_atomic_set_release
#endif
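
/*
 * arch_atomic_read_acquire()/arch_atomic_set_release() give ACQUIRE/RELEASE
 * ordering to otherwise-plain loads and stores of v->counter. An
 * illustrative publish/consume pairing (the `data` and `ready` variables
 * here are hypothetical, not part of this file):
 *
 *	// producer
 *	data = 42;
 *	arch_atomic_set_release(&ready, 1);
 *
 *	// consumer
 *	while (!arch_atomic_read_acquire(&ready))
 *		cpu_relax();
 *	BUG_ON(data != 42);
 *
 * The RELEASE store cannot be reordered before the write to data, and the
 * ACQUIRE load cannot be reordered after the subsequent read of data.
 */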
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */
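
/*
 * The block above is the template for every ordering fallback in this file:
 * given only the _relaxed primitive, acquire is derived as "relaxed op, then
 * __atomic_acquire_fence()", release as "__atomic_release_fence(), then
 * relaxed op", and the fully ordered form as the relaxed op bracketed by
 * __atomic_pre_full_fence()/__atomic_post_full_fence(). Assuming the
 * mainline defaults, the pre/post full fences fall back to
 * smp_mb__before_atomic()/smp_mb__after_atomic(), which an architecture may
 * define as something cheaper than a full smp_mb() when its RMW operations
 * are already partially ordered.
 */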
#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */
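
/*
 * Derivation order for inc_return, as above: when an architecture provides
 * no arch_atomic_inc_return*() at all, each ordering variant is synthesized
 * from the matching add_return variant, e.g.
 *
 *	arch_atomic_inc_return_acquire(v) := arch_atomic_add_return_acquire(1, v)
 *
 * The nested "#ifdef arch_atomic_inc_return" handles the case where only the
 * fully ordered version exists: since full ordering is the strongest, it can
 * safely stand in for the acquire/release/relaxed names as well. The same
 * scheme is used for fetch_inc, dec_return and fetch_dec below.
 */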
#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif
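
/*
 * arch_atomic_andnot(i, v) atomically clears the bits of @i in @v, i.e.
 * v->counter &= ~i. A usage sketch (the flags word and FLAG_BUSY bit are
 * hypothetical, not from this file):
 *
 *	atomic_t flags;
 *	#define FLAG_BUSY	BIT(0)
 *
 *	arch_atomic_andnot(FLAG_BUSY, &flags);	// flags &= ~FLAG_BUSY
 *
 * Architectures with a native bit-clear atomic can override this rather
 * than going through arch_atomic_and(~i, v).
 */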
#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */
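
/*
 * arch_atomic_try_cmpxchg() folds the common cmpxchg retry idiom into one
 * call: on failure it writes the value actually found back into *old, so a
 * retry loop never needs to re-read @v. A sketch of the canonical loop (the
 * LIMIT bound is hypothetical):
 *
 *	int old = arch_atomic_read(v);
 *	do {
 *		if (old >= LIMIT)
 *			return false;
 *	} while (!arch_atomic_try_cmpxchg(v, &old, old + 1));
 *	return true;
 *
 * This is exactly the shape used by the _unless()/_if_positive() helpers
 * later in this file.
 */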
#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is 0,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);
	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));
	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif
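
/*
 * arch_atomic_inc_not_zero() is the lookup-side refcount idiom: an object
 * whose count has already hit zero is being freed and must not be revived.
 * An illustrative sketch (obj, its refs field and the lookup() helper are
 * hypothetical, not from this file):
 *
 *	rcu_read_lock();
 *	obj = lookup(key);
 *	if (obj && !arch_atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	// lost the race with the final put
 *	rcu_read_unlock();
 */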
#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);
	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);
	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));
	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif
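
/*
 * arch_atomic_dec_if_positive() decrements only when the result stays
 * non-negative and returns the new value, so a negative return means the
 * counter was left untouched. A counting-semaphore style sketch (the
 * sem_count variable is hypothetical):
 *
 *	if (arch_atomic_dec_if_positive(&sem_count) < 0)
 *		return -EAGAIN;	// no slots free; count unchanged
 */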
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
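
/*
 * On 32-bit architectures without native 64-bit atomics,
 * CONFIG_GENERIC_ATOMIC64 selects the asm-generic implementation; assuming
 * the mainline lib/atomic64.c, that backs each atomic64_t operation with a
 * small hashed array of spinlocks rather than hardware RMW instructions,
 * so the fallbacks below still compose correctly on top of it.
 */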
  1039. #ifndef arch_atomic64_read_acquire
  1040. static __always_inline s64
  1041. arch_atomic64_read_acquire(const atomic64_t *v)
  1042. {
  1043. return smp_load_acquire(&(v)->counter);
  1044. }
  1045. #define arch_atomic64_read_acquire arch_atomic64_read_acquire
  1046. #endif
  1047. #ifndef arch_atomic64_set_release
  1048. static __always_inline void
  1049. arch_atomic64_set_release(atomic64_t *v, s64 i)
  1050. {
  1051. smp_store_release(&(v)->counter, i);
  1052. }
  1053. #define arch_atomic64_set_release arch_atomic64_set_release
  1054. #endif
  1055. #ifndef arch_atomic64_add_return_relaxed
  1056. #define arch_atomic64_add_return_acquire arch_atomic64_add_return
  1057. #define arch_atomic64_add_return_release arch_atomic64_add_return
  1058. #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
  1059. #else /* arch_atomic64_add_return_relaxed */
  1060. #ifndef arch_atomic64_add_return_acquire
  1061. static __always_inline s64
  1062. arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
  1063. {
  1064. s64 ret = arch_atomic64_add_return_relaxed(i, v);
  1065. __atomic_acquire_fence();
  1066. return ret;
  1067. }
  1068. #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
  1069. #endif
  1070. #ifndef arch_atomic64_add_return_release
  1071. static __always_inline s64
  1072. arch_atomic64_add_return_release(s64 i, atomic64_t *v)
  1073. {
  1074. __atomic_release_fence();
  1075. return arch_atomic64_add_return_relaxed(i, v);
  1076. }
  1077. #define arch_atomic64_add_return_release arch_atomic64_add_return_release
  1078. #endif
  1079. #ifndef arch_atomic64_add_return
  1080. static __always_inline s64
  1081. arch_atomic64_add_return(s64 i, atomic64_t *v)
  1082. {
  1083. s64 ret;
  1084. __atomic_pre_full_fence();
  1085. ret = arch_atomic64_add_return_relaxed(i, v);
  1086. __atomic_post_full_fence();
  1087. return ret;
  1088. }
  1089. #define arch_atomic64_add_return arch_atomic64_add_return
  1090. #endif
  1091. #endif /* arch_atomic64_add_return_relaxed */
  1092. #ifndef arch_atomic64_fetch_add_relaxed
  1093. #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
  1094. #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
  1095. #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
  1096. #else /* arch_atomic64_fetch_add_relaxed */
  1097. #ifndef arch_atomic64_fetch_add_acquire
  1098. static __always_inline s64
  1099. arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
  1100. {
  1101. s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
  1102. __atomic_acquire_fence();
  1103. return ret;
  1104. }
  1105. #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
  1106. #endif
  1107. #ifndef arch_atomic64_fetch_add_release
  1108. static __always_inline s64
  1109. arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
  1110. {
  1111. __atomic_release_fence();
  1112. return arch_atomic64_fetch_add_relaxed(i, v);
  1113. }
  1114. #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
  1115. #endif
  1116. #ifndef arch_atomic64_fetch_add
  1117. static __always_inline s64
  1118. arch_atomic64_fetch_add(s64 i, atomic64_t *v)
  1119. {
  1120. s64 ret;
  1121. __atomic_pre_full_fence();
  1122. ret = arch_atomic64_fetch_add_relaxed(i, v);
  1123. __atomic_post_full_fence();
  1124. return ret;
  1125. }
  1126. #define arch_atomic64_fetch_add arch_atomic64_fetch_add
  1127. #endif
  1128. #endif /* arch_atomic64_fetch_add_relaxed */
  1129. #ifndef arch_atomic64_sub_return_relaxed
  1130. #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
  1131. #define arch_atomic64_sub_return_release arch_atomic64_sub_return
  1132. #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
  1133. #else /* arch_atomic64_sub_return_relaxed */
  1134. #ifndef arch_atomic64_sub_return_acquire
  1135. static __always_inline s64
  1136. arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
  1137. {
  1138. s64 ret = arch_atomic64_sub_return_relaxed(i, v);
  1139. __atomic_acquire_fence();
  1140. return ret;
  1141. }
  1142. #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
  1143. #endif
  1144. #ifndef arch_atomic64_sub_return_release
  1145. static __always_inline s64
  1146. arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
  1147. {
  1148. __atomic_release_fence();
  1149. return arch_atomic64_sub_return_relaxed(i, v);
  1150. }
  1151. #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
  1152. #endif
  1153. #ifndef arch_atomic64_sub_return
  1154. static __always_inline s64
  1155. arch_atomic64_sub_return(s64 i, atomic64_t *v)
  1156. {
  1157. s64 ret;
  1158. __atomic_pre_full_fence();
  1159. ret = arch_atomic64_sub_return_relaxed(i, v);
  1160. __atomic_post_full_fence();
  1161. return ret;
  1162. }
  1163. #define arch_atomic64_sub_return arch_atomic64_sub_return
  1164. #endif
  1165. #endif /* arch_atomic64_sub_return_relaxed */
  1166. #ifndef arch_atomic64_fetch_sub_relaxed
  1167. #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
  1168. #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
  1169. #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
  1170. #else /* arch_atomic64_fetch_sub_relaxed */
  1171. #ifndef arch_atomic64_fetch_sub_acquire
  1172. static __always_inline s64
  1173. arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
  1174. {
  1175. s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
  1176. __atomic_acquire_fence();
  1177. return ret;
  1178. }
  1179. #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
  1180. #endif
  1181. #ifndef arch_atomic64_fetch_sub_release
  1182. static __always_inline s64
  1183. arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
  1184. {
  1185. __atomic_release_fence();
  1186. return arch_atomic64_fetch_sub_relaxed(i, v);
  1187. }
  1188. #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
  1189. #endif
  1190. #ifndef arch_atomic64_fetch_sub
  1191. static __always_inline s64
  1192. arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
  1193. {
  1194. s64 ret;
  1195. __atomic_pre_full_fence();
  1196. ret = arch_atomic64_fetch_sub_relaxed(i, v);
  1197. __atomic_post_full_fence();
  1198. return ret;
  1199. }
  1200. #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
  1201. #endif
  1202. #endif /* arch_atomic64_fetch_sub_relaxed */
  1203. #ifndef arch_atomic64_inc
  1204. static __always_inline void
  1205. arch_atomic64_inc(atomic64_t *v)
  1206. {
  1207. arch_atomic64_add(1, v);
  1208. }
  1209. #define arch_atomic64_inc arch_atomic64_inc
  1210. #endif
  1211. #ifndef arch_atomic64_inc_return_relaxed
  1212. #ifdef arch_atomic64_inc_return
  1213. #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
  1214. #define arch_atomic64_inc_return_release arch_atomic64_inc_return
  1215. #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
  1216. #endif /* arch_atomic64_inc_return */
  1217. #ifndef arch_atomic64_inc_return
  1218. static __always_inline s64
  1219. arch_atomic64_inc_return(atomic64_t *v)
  1220. {
  1221. return arch_atomic64_add_return(1, v);
  1222. }
  1223. #define arch_atomic64_inc_return arch_atomic64_inc_return
  1224. #endif
  1225. #ifndef arch_atomic64_inc_return_acquire
  1226. static __always_inline s64
  1227. arch_atomic64_inc_return_acquire(atomic64_t *v)
  1228. {
  1229. return arch_atomic64_add_return_acquire(1, v);
  1230. }
  1231. #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
  1232. #endif
  1233. #ifndef arch_atomic64_inc_return_release
  1234. static __always_inline s64
  1235. arch_atomic64_inc_return_release(atomic64_t *v)
  1236. {
  1237. return arch_atomic64_add_return_release(1, v);
  1238. }
  1239. #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
  1240. #endif
  1241. #ifndef arch_atomic64_inc_return_relaxed
  1242. static __always_inline s64
  1243. arch_atomic64_inc_return_relaxed(atomic64_t *v)
  1244. {
  1245. return arch_atomic64_add_return_relaxed(1, v);
  1246. }
  1247. #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
  1248. #endif
  1249. #else /* arch_atomic64_inc_return_relaxed */
  1250. #ifndef arch_atomic64_inc_return_acquire
  1251. static __always_inline s64
  1252. arch_atomic64_inc_return_acquire(atomic64_t *v)
  1253. {
  1254. s64 ret = arch_atomic64_inc_return_relaxed(v);
  1255. __atomic_acquire_fence();
  1256. return ret;
  1257. }
  1258. #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
  1259. #endif
  1260. #ifndef arch_atomic64_inc_return_release
  1261. static __always_inline s64
  1262. arch_atomic64_inc_return_release(atomic64_t *v)
  1263. {
  1264. __atomic_release_fence();
  1265. return arch_atomic64_inc_return_relaxed(v);
  1266. }
  1267. #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
  1268. #endif
  1269. #ifndef arch_atomic64_inc_return
  1270. static __always_inline s64
  1271. arch_atomic64_inc_return(atomic64_t *v)
  1272. {
  1273. s64 ret;
  1274. __atomic_pre_full_fence();
  1275. ret = arch_atomic64_inc_return_relaxed(v);
  1276. __atomic_post_full_fence();
  1277. return ret;
  1278. }
  1279. #define arch_atomic64_inc_return arch_atomic64_inc_return
  1280. #endif
  1281. #endif /* arch_atomic64_inc_return_relaxed */
#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */
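
/*
 * Illustrative contrast (not part of this header): the fetch_ ops return
 * the value the counter held *before* the operation, while the _return
 * ops above return the value *after* it. Assuming v currently reads 41:
 *
 *	s64 old = arch_atomic64_fetch_inc(&v);	// old == 41, v is now 42
 *	s64 new = arch_atomic64_inc_return(&v);	// new == 43, v is now 43
 */
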
#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */
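
/*
 * A note on the #ifdef ladder used throughout: when an architecture
 * supplies only the fully-ordered op (e.g. arch_atomic64_dec_return), the
 * _acquire/_release/_relaxed names are simply aliased to it. That is
 * always safe because a fully-ordered operation is a correct, merely
 * stronger, implementation of every weaker ordering.
 */
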
#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */
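
/*
 * Usage sketch (illustrative only, not part of this header): the bitwise
 * fetch ops return the bits observed before the operation, so a caller
 * can clear a mask and learn whether it had been set. FLAG_BUSY is a
 * hypothetical flag:
 *
 *	if (arch_atomic64_fetch_and(~FLAG_BUSY, &v) & FLAG_BUSY)
 *		...	// this CPU is the one that cleared it
 */
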
#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif
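
/*
 * Note: andnot(i, v) is by definition and(~i, v). It exists as a distinct
 * operation because some architectures can implement the bit-clear form
 * natively in a single instruction, in which case they provide their own
 * definition and this fallback is skipped.
 */
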
#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
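
/*
 * Usage sketch (illustrative only, not part of this header): xchg
 * unconditionally stores the new value and returns the previous one,
 * which makes "take and reset" a single atomic step:
 *
 *	s64 pending = arch_atomic64_xchg(&v, 0);
 */
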
#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */
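
/*
 * Usage sketch (illustrative only, not part of this header): try_cmpxchg
 * writes the observed value back through *old on failure, so the
 * canonical CAS loop does not need to re-read @v on each iteration:
 *
 *	s64 new, old = arch_atomic64_read(&v);
 *	do {
 *		new = compute(old);	// 'compute' is a hypothetical update
 *	} while (!arch_atomic64_try_cmpxchg(&v, &old, new));
 */
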
#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif
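
/*
 * The loop in arch_atomic64_fetch_add_unless() is the same
 * read-then-try_cmpxchg pattern sketched earlier: the 'unless' condition
 * is re-checked against the freshly observed value after every failed
 * CAS, so the add can never happen once @v has reached @u. The helpers
 * below are thin wrappers around it.
 */
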
#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
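
/*
 * Usage sketch (illustrative only, not part of this header):
 * inc_not_zero() is the classic way to take a reference on an object
 * whose refcount dropping to zero triggers teardown. Assuming a
 * hypothetical lookup structure:
 *
 *	obj = lookup(key);
 *	if (obj && !arch_atomic64_inc_not_zero(&obj->refs))
 *		obj = NULL;	// raced with the final put; don't touch it
 */
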
#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif
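
/*
 * Note: dec_if_positive() returns the value as it would be after the
 * decrement: the new value (>= 0) when the decrement happened, or a
 * negative result when it was skipped because @v was already <= 0.
 * Callers therefore test for '>= 0', e.g. for a semaphore-style count
 * (sem_count is illustrative):
 *
 *	if (arch_atomic64_dec_if_positive(&sem_count) >= 0)
 *		...	// got a slot
 */
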
#endif /* _LINUX_ATOMIC_FALLBACK_H */

// 90cd26cfd69d2250303d654955a0cc12620fb91b