#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
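
/*
 * Illustrative sketch (the "hits" variable is hypothetical, not part of
 * this header): for a counter declared with DEFINE_PER_CPU(int, hits),
 * the instance belonging to CPU "cpu" is reached by relocating the base
 * address by that CPU's offset, roughly:
 *
 *	int *p = SHIFT_PERCPU_PTR(&hits, per_cpu_offset(cpu));
 *	(*p)++;
 *
 * This is essentially what per_cpu(hits, cpu) from <linux/percpu-defs.h>
 * boils down to.
 */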

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif

#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
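
/*
 * Note: under CONFIG_DEBUG_PREEMPT, smp_processor_id() is the checked
 * variant that warns when called from preemptible context, while
 * raw_smp_processor_id() performs no such check. my_cpu_offset thus
 * trades a little runtime overhead for debugging coverage.
 */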

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
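
/*
 * Illustrative sketch (reusing the hypothetical "hits" variable from
 * above): on this generic path, raw_cpu_ptr(&hits) reduces to
 * arch_raw_cpu_ptr(&hits), i.e. the base address shifted by
 * __my_cpu_offset:
 *
 *	int *p = raw_cpu_ptr(&hits);
 *
 * The caller must ensure it stays on the same CPU for as long as the
 * pointer is in use.
 */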

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#define raw_cpu_generic_to_op(pcp, val, op) \
do { \
	*raw_cpu_ptr(&(pcp)) op val; \
} while (0)
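
/*
 * Illustrative expansion (assumed caller): raw_cpu_add(hits, 1), once
 * routed through the size-suffixed fallbacks below, becomes
 *
 *	*raw_cpu_ptr(&hits) += 1;
 *
 * i.e. a plain read-modify-write with no protection against preemption
 * or interrupts; "raw" callers must provide that themselves.
 */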

#define raw_cpu_generic_add_return(pcp, val) \
({ \
	raw_cpu_add(pcp, val); \
	raw_cpu_read(pcp); \
})

#define raw_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(pcp) __ret; \
	__ret = raw_cpu_read(pcp); \
	raw_cpu_write(pcp, nval); \
	__ret; \
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(pcp) __ret; \
	__ret = raw_cpu_read(pcp); \
	if (__ret == (oval)) \
		raw_cpu_write(pcp, nval); \
	__ret; \
})

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	int __ret = 0; \
	if (raw_cpu_read(pcp1) == (oval1) && \
			raw_cpu_read(pcp2) == (oval2)) { \
		raw_cpu_write(pcp1, nval1); \
		raw_cpu_write(pcp2, nval2); \
		__ret = 1; \
	} \
	(__ret); \
})
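
/*
 * Usage sketch (hypothetical variables): the cmpxchg variant returns the
 * value it observed, so the swap succeeded iff the return value equals
 * the expected old value, whereas the cmpxchg_double variant returns 1
 * on success and 0 on failure, updating both locations only when both
 * comparisons match:
 *
 *	prev = raw_cpu_cmpxchg(hits, old, new);	(succeeded iff prev == old)
 *	ok = raw_cpu_cmpxchg_double(a, b, oa, ob, na, nb);
 */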

#define this_cpu_generic_read(pcp) \
({ \
	typeof(pcp) __ret; \
	preempt_disable(); \
	__ret = *this_cpu_ptr(&(pcp)); \
	preempt_enable(); \
	__ret; \
})
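
/*
 * Note: disabling preemption only pins the task to one CPU for the
 * duration of the load; it does not serialize against interrupt
 * handlers on that CPU, which is sufficient for a plain read.
 */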

#define this_cpu_generic_to_op(pcp, val, op) \
do { \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	*raw_cpu_ptr(&(pcp)) op val; \
	raw_local_irq_restore(__flags); \
} while (0)
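
/*
 * Note: unlike the raw_ variants, these this_cpu_ read-modify-write
 * helpers run with local interrupts disabled, making each operation
 * atomic with respect to both preemption and interrupt handlers on the
 * local CPU. Illustrative expansion of this_cpu_add(hits, 1):
 *
 *	unsigned long __flags;
 *	raw_local_irq_save(__flags);
 *	*raw_cpu_ptr(&hits) += 1;
 *	raw_local_irq_restore(__flags);
 */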

#define this_cpu_generic_add_return(pcp, val) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	raw_cpu_add(pcp, val); \
	__ret = raw_cpu_read(pcp); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_read(pcp); \
	raw_cpu_write(pcp, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_read(pcp); \
	if (__ret == (oval)) \
		raw_cpu_write(pcp, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	int __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
			oval1, oval2, nval1, nval2); \
	raw_local_irq_restore(__flags); \
	__ret; \
})
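
/*
 * Everything below supplies per-size fallbacks: an arch that can
 * implement an operation more efficiently for a given operand width
 * defines the corresponding _1/_2/_4/_8 macro (1, 2, 4 or 8 byte
 * operands) before this point, and only the missing widths pick up the
 * generic version. Illustrative dispatch (assumed caller): for a 4-byte
 * "hits", this_cpu_add(hits, 1) resolves to this_cpu_add_4(), which
 * here falls back to this_cpu_generic_to_op(hits, 1, +=).
 */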

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */