#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
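
/*
 * Illustrative sketch (editor's addition, not part of the original
 * header; nr_events is a made-up example variable): per-cpu accessors
 * apply this offset to the variable's link-time address, so reading
 * CPU 3's instance amounts to:
 *
 *	DEFINE_PER_CPU(unsigned long, nr_events);
 *
 *	unsigned long *p = per_cpu_ptr(&nr_events, 3);
 *	unsigned long n  = *p;	(&nr_events shifted by __per_cpu_offset[3])
 */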

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
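
/*
 * Editor's example (not from the original source): under
 * CONFIG_DEBUG_PREEMPT, my_cpu_offset goes through smp_processor_id(),
 * which warns if the caller is preemptible, since then the CPU, and
 * hence the offset, could change under it:
 *
 *	preempt_disable();
 *	... use my_cpu_offset ...	(fine: CPU cannot change)
 *	preempt_enable();
 *
 *	... use my_cpu_offset ...	(preemptible: debug check fires)
 */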

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
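
/*
 * Sketch of the generic translation (editor's addition): on this
 * fallback path,
 *
 *	raw_cpu_ptr(&var)
 *
 * is effectively (typeof(&var))((char *)&var + __my_cpu_offset). Note
 * there is no protection against the task migrating to another CPU
 * between obtaining the pointer and using it.
 */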

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
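
/*
 * Editor's sketch (hedged; the details live in linux/percpu-defs.h and
 * the percpu allocator, not in this header): DEFINE_PER_CPU() emits
 * its variable into PER_CPU_BASE_SECTION, and on SMP the boot code
 * (see setup_per_cpu_areas() above) gives each possible CPU its own
 * copy of that section, recording each copy's offset in
 * __per_cpu_offset[]:
 *
 *	DEFINE_PER_CPU(int, x);		(x lands in ".data..percpu")
 */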

#define raw_cpu_generic_read(pcp) \
({ \
	*raw_cpu_ptr(&(pcp)); \
})

#define raw_cpu_generic_to_op(pcp, val, op) \
do { \
	*raw_cpu_ptr(&(pcp)) op val; \
} while (0)

#define raw_cpu_generic_add_return(pcp, val) \
({ \
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp)); \
	\
	*__p += val; \
	*__p; \
})
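
/*
 * Usage sketch (editor's addition; "depth" is a made-up per-cpu
 * variable): the add_return form yields the value after the addition,
 * which callers typically use to detect a first or last reference:
 *
 *	if (this_cpu_add_return(depth, 1) == 1)
 *		... first user on this CPU ...
 */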

#define raw_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp)); \
	typeof(pcp) __ret; \
	__ret = *__p; \
	*__p = nval; \
	__ret; \
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp)); \
	typeof(pcp) __ret; \
	__ret = *__p; \
	if (__ret == (oval)) \
		*__p = nval; \
	__ret; \
})
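
/*
 * Usage sketch (editor's addition; "state", IDLE and BUSY are made-up
 * names): cmpxchg returns the value it found, so success is detected
 * by comparing the return value against the expected old value:
 *
 *	old = this_cpu_cmpxchg(state, IDLE, BUSY);
 *	if (old == IDLE)
 *		... this CPU won the IDLE -> BUSY transition ...
 */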

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1)); \
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2)); \
	int __ret = 0; \
	if (*__p1 == (oval1) && *__p2 == (oval2)) { \
		*__p1 = nval1; \
		*__p2 = nval2; \
		__ret = 1; \
	} \
	(__ret); \
})
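
/*
 * Editor's note with a made-up example ("head"/"gen"): unlike the
 * single-word cmpxchg above, the double variant returns 1 on success
 * and 0 on failure, and updates both per-cpu words only when both
 * matched their expected values:
 *
 *	if (this_cpu_cmpxchg_double(head, gen, old_head, old_gen,
 *				    new_head, old_gen + 1))
 *		... both fields changed together ...
 */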

#define __this_cpu_generic_read_nopreempt(pcp) \
({ \
	typeof(pcp) __ret; \
	preempt_disable_notrace(); \
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp))); \
	preempt_enable_notrace(); \
	__ret; \
})

#define __this_cpu_generic_read_noirq(pcp) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_read(pcp); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_read(pcp) \
({ \
	typeof(pcp) __ret; \
	if (__native_word(pcp)) \
		__ret = __this_cpu_generic_read_nopreempt(pcp); \
	else \
		__ret = __this_cpu_generic_read_noirq(pcp); \
	__ret; \
})
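
/*
 * Editor's note (reasoning not spelled out in the original): for types
 * the CPU can load in a single access (__native_word(): 1, 2, 4 and,
 * on 64-bit, 8 bytes), disabling preemption plus READ_ONCE() already
 * guarantees a torn-free read of the local copy; wider types fall back
 * to disabling interrupts around a plain read.
 */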

#define this_cpu_generic_to_op(pcp, val, op) \
do { \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	raw_cpu_generic_to_op(pcp, val, op); \
	raw_local_irq_restore(__flags); \
} while (0)

#define this_cpu_generic_add_return(pcp, val) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_add_return(pcp, val); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_xchg(pcp, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	int __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
			oval1, oval2, nval1, nval2); \
	raw_local_irq_restore(__flags); \
	__ret; \
})
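
/*
 * Editor's summary (hedged; "cnt" is a made-up variable): the
 * this_cpu_generic_* wrappers above differ from their raw_cpu_generic_*
 * counterparts only in bracketing the operation with
 * raw_local_irq_save()/restore(), which makes each read-modify-write
 * atomic with respect to interrupts on the local CPU; e.g.
 *
 *	this_cpu_add(cnt, 1);
 *
 * cannot lose an update to an interrupt handler doing the same, while
 * raw_cpu_add(cnt, 1) may, if not otherwise serialized.
 *
 * The size-suffixed defaults below (_1/_2/_4/_8) are the fallbacks
 * selected by operand size (the dispatch lives in linux/percpu-defs.h);
 * an architecture overrides any of them by defining the name before
 * this header is included.
 */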

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */