/* asm-generic per-CPU accessor definitions */
- #ifndef _ASM_GENERIC_PERCPU_H_
- #define _ASM_GENERIC_PERCPU_H_
- #include <linux/compiler.h>
- #include <linux/threads.h>
- #include <linux/percpu-defs.h>
- #ifdef CONFIG_SMP
- /*
- * per_cpu_offset() is the offset that has to be added to a
- * percpu variable to get to the instance for a certain processor.
- *
- * Most arches use the __per_cpu_offset array for those offsets but
- * some arches have their own ways of determining the offset (x86_64, s390).
- */
- #ifndef __per_cpu_offset
- extern unsigned long __per_cpu_offset[NR_CPUS];
- #define per_cpu_offset(x) (__per_cpu_offset[x])
- #endif
- /*
- * Determine the offset for the currently active processor.
- * An arch may define __my_cpu_offset to provide a more effective
- * means of obtaining the offset to the per cpu variables of the
- * current processor.
- */
- #ifndef __my_cpu_offset
- #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
- #endif
- #ifdef CONFIG_DEBUG_PREEMPT
- #define my_cpu_offset per_cpu_offset(smp_processor_id())
- #else
- #define my_cpu_offset __my_cpu_offset
- #endif
- /*
- * Arch may define arch_raw_cpu_ptr() to provide more efficient address
- * translations for raw_cpu_ptr().
- */
- #ifndef arch_raw_cpu_ptr
- #define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
- #endif
- #ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
- extern void setup_per_cpu_areas(void);
- #endif
- #endif /* SMP */
/*
 * Linker section holding per-CPU variables: a dedicated ".data..percpu"
 * section on SMP (replicated per CPU at boot), plain ".data" on UP where
 * a single instance suffices.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Arch hooks for extra attributes on per-CPU declarations/definitions. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
/*
 * Read this CPU's instance of @pcp.  "raw": no preemption or interrupt
 * protection is provided; the caller must guarantee it cannot migrate
 * to another CPU for the duration of the access.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
/*
 * Apply "lvalue op val" (op is an assignment operator such as +=, &=)
 * to this CPU's instance of @pcp.  "raw": no migration protection.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
/*
 * Add @val to this CPU's instance of @pcp and return the new value.
 * The pointer is fetched once so the increment and the returned value
 * refer to the same instance.
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})
/*
 * Store @nval into this CPU's instance of @pcp and return the previous
 * value.  Not atomic versus interrupts; callers needing IRQ safety use
 * the this_cpu variant.
 */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})
/*
 * Compare-and-exchange on this CPU's instance of @pcp: if the current
 * value equals @oval, store @nval.  Always returns the value observed
 * before any store (so ret == oval signals success).
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})
/*
 * Double compare-and-exchange: if BOTH this-CPU instances match their
 * expected old values, store both new values.  Returns 1 on success,
 * 0 if either comparison failed (in which case nothing is written).
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
/*
 * this_cpu read path.  For types the machine can load in one access
 * (__native_word()), disabling preemption plus READ_ONCE() yields a
 * coherent value; for wider types interrupts are disabled so the
 * multi-access read cannot be torn by an IRQ on this CPU.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	__ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* Dispatch on operand width; __native_word() is a compile-time check. */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
/*
 * this_cpu variants of the generic read-modify-write operations.  Each
 * wraps the corresponding raw_cpu_generic_*() in a local-IRQ-disabled
 * region, making the operation safe against interrupts (and therefore
 * preemption) on the local CPU.
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/*
 * Fallback raw_cpu_*_N() read/write/add/and/or operations, used when the
 * architecture does not supply an optimized version.  The _1/_2/_4/_8
 * suffix is the operand width in bytes; every width maps to the same
 * generic implementation.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
/*
 * Fallback raw_cpu_*_N() add_return/xchg/cmpxchg/cmpxchg_double
 * operations for architectures without optimized versions; every
 * operand width maps to the same generic implementation.
 */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
/*
 * Fallback this_cpu_*_N() read/write/add/and/or operations (IRQ-safe via
 * the this_cpu_generic_* wrappers) for architectures without optimized
 * versions; every operand width maps to the same generic implementation.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
/*
 * Fallback this_cpu_*_N() add_return/xchg/cmpxchg/cmpxchg_double
 * operations (IRQ-safe via the this_cpu_generic_* wrappers) for
 * architectures without optimized versions.
 */
#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
- #endif /* _ASM_GENERIC_PERCPU_H_ */
|