/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>

/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset

/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, use weak definitions to force the compiler to
 * generate external references.
 */
#if defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif

/*
 * We use a compare-and-swap loop since that uses less cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;						\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})
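
/*
 * cmpxchg() returns the value that was in *ptr__ before the swap was
 * attempted, so prev__ == old__ above means the update went through;
 * any other value means somebody modified the location in between and
 * the loop retries with the fresh value. Preemption is disabled only
 * to keep the task on the cpu whose per cpu data it modifies, the
 * update itself is atomic.
 */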

#define this_cpu_add_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
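
/*
 * The interlocked-access facility instructions used further below
 * only exist for 4 and 8 byte operands, so the 1 and 2 byte variants
 * always take the compare-and-swap loop.
 *
 * Usage sketch (hypothetical per cpu variable, for illustration only):
 *
 *	DEFINE_PER_CPU(unsigned short, demo_counter);
 *	...
 *	this_cpu_add(demo_counter, 1);
 *
 * The generic this_cpu_add() selects this_cpu_add_2() here based on
 * the size of the operand.
 */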

#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES

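/*
 * Machines without the z196 interlocked-access facility have no
 * atomic add/and/or instructions, so the 4 and 8 byte variants fall
 * back to the compare-and-swap loop as well.
 */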
#define this_cpu_add_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, |)

#else

#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 " %[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)			\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 " %[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
}
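
/*
 * Two paths above: a compile time constant that fits the signed 8 bit
 * immediate of ASI/AGSI (hence the -129 < val < 128 check) is added
 * directly to storage, everything else goes through LAA/LAAG, which
 * atomically add a register to storage and fetch the old value.
 */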

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op " %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})
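
/*
 * LAA/LAAG place the old value of the storage operand in old__, the
 * new value therefore has to be computed as old__ + val__.
 */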

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op " %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
}

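/*
 * LAN/LANG and LAO/LAOG atomically AND/OR a register into storage;
 * the fetched old value is not needed here and is discarded.
 */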
#define this_cpu_and_4(pcp, val) arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val) arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val) arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val) arch_this_cpu_to_op(pcp, val, "laog")

#endif

#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})
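
/*
 * cmpxchg() is atomic by itself, preemption is disabled only so the
 * task cannot migrate between calculating the cpu local address and
 * operating on it.
 */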

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable_notrace();					\
	ret__;								\
})
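
/* xchg() is implemented with a compare-and-swap loop on s390 as well. */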

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	\
({									\
	typeof(pcp1) *p1__;						\
	typeof(pcp2) *p2__;						\
	int ret__;							\
									\
	preempt_disable_notrace();					\
	p1__ = raw_cpu_ptr(&(pcp1));					\
	p2__ = raw_cpu_ptr(&(pcp2));					\
	ret__ = __cmpxchg_double((unsigned long)p1__, (unsigned long)p2__, \
				 (unsigned long)(o1), (unsigned long)(o2), \
				 (unsigned long)(n1), (unsigned long)(n2)); \
	preempt_enable_notrace();					\
	ret__;								\
})
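
/*
 * Backed by CDSG (compare double and swap); the two locations must
 * form an adjacent, 16 byte aligned pair, which the generic
 * this_cpu_cmpxchg_double() callers are required to guarantee.
 */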

#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double

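/* Everything not overridden above comes from the generic implementation. */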
#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */