#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP
/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
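
/*
 * Illustration only (hypothetical variable; callers normally go through
 * the per_cpu()/raw_cpu_ptr() accessors rather than doing this by hand):
 * with the array-based scheme above, reaching CPU 3's instance of a
 * percpu variable is plain pointer arithmetic,
 *
 *	DEFINE_PER_CPU(int, example_counter);
 *	int *p = SHIFT_PERCPU_PTR(&example_counter, per_cpu_offset(3));
 *
 * i.e. the variable's link-time address plus that CPU's offset.
 */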

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
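
/*
 * Under CONFIG_DEBUG_PREEMPT, smp_processor_id() checks that the caller
 * cannot be preempted (and warns otherwise), catching uses of
 * my_cpu_offset from preemptible context; raw_smp_processor_id() skips
 * that check.
 */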
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
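
/*
 * Usage sketch (hypothetical variable, illustration only):
 *
 *	DEFINE_PER_CPU(int, example_stat);
 *	int *p = raw_cpu_ptr(&example_stat);
 *
 * The generic fallback simply shifts the percpu address by
 * __my_cpu_offset; no preemption or interrupt state is checked,
 * so the caller must already be pinned to the CPU.
 */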

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
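
/*
 * What follows are the generic fallbacks for the size-suffixed percpu
 * operations (_1, _2, _4 and _8 for 1, 2, 4 and 8 byte variables).
 * An arch that has a faster instruction sequence for a given size
 * defines the corresponding macro before this point, and the #ifndef
 * guard skips the fallback.
 */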

#define raw_cpu_generic_to_op(pcp, val, op) \
do { \
	*raw_cpu_ptr(&(pcp)) op val; \
} while (0)

#define raw_cpu_generic_add_return(pcp, val) \
({ \
	raw_cpu_add(pcp, val); \
	raw_cpu_read(pcp); \
})

#define raw_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(pcp) __ret; \
	__ret = raw_cpu_read(pcp); \
	raw_cpu_write(pcp, nval); \
	__ret; \
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(pcp) __ret; \
	__ret = raw_cpu_read(pcp); \
	if (__ret == (oval)) \
		raw_cpu_write(pcp, nval); \
	__ret; \
})

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	int __ret = 0; \
	if (raw_cpu_read(pcp1) == (oval1) && \
	    raw_cpu_read(pcp2) == (oval2)) { \
		raw_cpu_write(pcp1, nval1); \
		raw_cpu_write(pcp2, nval2); \
		__ret = 1; \
	} \
	(__ret); \
})
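
/*
 * The double variant succeeds (returns 1) only if both locations match
 * their expected values, in which case both are updated; otherwise it
 * returns 0 and writes nothing.
 */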

#define __this_cpu_generic_read_nopreempt(pcp) \
({ \
	typeof(pcp) __ret; \
	preempt_disable(); \
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp))); \
	preempt_enable(); \
	__ret; \
})

#define __this_cpu_generic_read_noirq(pcp) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = *raw_cpu_ptr(&(pcp)); \
	raw_local_irq_restore(__flags); \
	__ret; \
})
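
/*
 * For a native word size, READ_ONCE() is a single load that cannot be
 * torn, so disabling preemption is enough to read this CPU's instance
 * consistently.  Larger values may be loaded in pieces, so interrupts
 * are disabled to keep an interrupt handler on this CPU from updating
 * the variable mid-read.
 */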
#define this_cpu_generic_read(pcp) \
({ \
	typeof(pcp) __ret; \
	if (__native_word(pcp)) \
		__ret = __this_cpu_generic_read_nopreempt(pcp); \
	else \
		__ret = __this_cpu_generic_read_noirq(pcp); \
	__ret; \
})

#define this_cpu_generic_to_op(pcp, val, op) \
do { \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	*raw_cpu_ptr(&(pcp)) op val; \
	raw_local_irq_restore(__flags); \
} while (0)

#define this_cpu_generic_add_return(pcp, val) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	raw_cpu_add(pcp, val); \
	__ret = raw_cpu_read(pcp); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_xchg(pcp, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_read(pcp); \
	raw_cpu_write(pcp, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval) \
({ \
	typeof(pcp) __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_read(pcp); \
	if (__ret == (oval)) \
		raw_cpu_write(pcp, nval); \
	raw_local_irq_restore(__flags); \
	__ret; \
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ \
	int __ret; \
	unsigned long __flags; \
	raw_local_irq_save(__flags); \
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2, \
			oval1, oval2, nval1, nval2); \
	raw_local_irq_restore(__flags); \
	__ret; \
})
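
/*
 * Usage sketch (hypothetical counter, illustration only):
 *
 *	DEFINE_PER_CPU(unsigned long, example_hits);
 *
 *	this_cpu_add(example_hits, 1);	/- irq-safe on this generic path
 *	if (this_cpu_cmpxchg(example_hits, 0, 1) == 0)
 *		;	/- we installed 1 into a previously-zero slot
 *
 * The this_cpu_* fallbacks above disable interrupts around the access,
 * while the raw_cpu_* ones assume the caller already prevents migration
 * and concurrent interrupt access.
 */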

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */