preempt.h

#ifndef __ASM_PREEMPT_H
#define __ASM_PREEMPT_H

#include <linux/thread_info.h>

#define PREEMPT_ENABLED	(0)

static __always_inline int preempt_count(void)
{
	return READ_ONCE(current_thread_info()->preempt_count);
}

static __always_inline volatile int *preempt_count_ptr(void)
{
	return &current_thread_info()->preempt_count;
}

static __always_inline void preempt_count_set(int pc)
{
	*preempt_count_ptr() = pc;
}

/*
 * must be macros to avoid header recursion hell
 */
#define init_task_preempt_count(p) do { \
	task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
} while (0)

#define init_idle_preempt_count(p, cpu) do { \
	task_thread_info(p)->preempt_count = PREEMPT_ENABLED; \
} while (0)

static __always_inline void set_preempt_need_resched(void)
{
}

static __always_inline void clear_preempt_need_resched(void)
{
}

static __always_inline bool test_preempt_need_resched(void)
{
	return false;
}
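
/*
 * Note: the three helpers above are deliberately empty. This generic flavour
 * keeps the preempt count in thread_info and does not fold NEED_RESCHED into
 * it, so pending reschedules are tracked solely via the TIF_NEED_RESCHED
 * flag (architectures such as x86 instead fold the flag into a per-cpu
 * preempt count).
 */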

/*
 * The various preempt_count add/sub methods
 */
static __always_inline void __preempt_count_add(int val)
{
	*preempt_count_ptr() += val;
}

static __always_inline void __preempt_count_sub(int val)
{
	*preempt_count_ptr() -= val;
}

static __always_inline bool __preempt_count_dec_and_test(void)
{
	/*
	 * Because load-store architectures cannot do per-cpu atomic
	 * operations, we cannot use PREEMPT_NEED_RESCHED here: the folded
	 * bit might get lost.
	 */
	return !--*preempt_count_ptr() && tif_need_resched();
}

/*
 * Returns true when we need to resched and can (barring IRQ state).
 */
static __always_inline bool should_resched(int preempt_offset)
{
	return unlikely(preempt_count() == preempt_offset &&
			tif_need_resched());
}
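
/*
 * With a preemptible kernel (CONFIG_PREEMPT), preempt_enable() and friends
 * call into the scheduler entry points below once the count drops back to
 * the enabled level and a reschedule is pending.
 */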

#ifdef CONFIG_PREEMPT
extern asmlinkage void preempt_schedule(void);
#define __preempt_schedule() preempt_schedule()
extern asmlinkage void preempt_schedule_notrace(void);
#define __preempt_schedule_notrace() preempt_schedule_notrace()
#endif /* CONFIG_PREEMPT */

#endif /* __ASM_PREEMPT_H */
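
Ordinary kernel code never calls the __preempt_count_* helpers directly; they back the
preempt_disable()/preempt_enable() macros in <linux/preempt.h>. Below is a simplified
sketch of that wiring, assuming CONFIG_PREEMPT and omitting the debug-accounting and
tracing variants that the real macros go through; treat it as an illustration, not the
exact upstream definitions.

/*
 * Simplified sketch of the <linux/preempt.h> wrappers built on the
 * primitives above. The real macros route through preempt_count_inc()/
 * preempt_count_dec_and_test() and add debug accounting and tracepoints.
 */
#define preempt_disable() \
do { \
	__preempt_count_add(1);		/* enter a non-preemptible section */ \
	barrier();			/* keep the protected code after the increment */ \
} while (0)

#define preempt_enable() \
do { \
	barrier(); \
	if (unlikely(__preempt_count_dec_and_test())) \
		__preempt_schedule();	/* count hit zero and a resched is pending */ \
} while (0)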