/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }
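
/*
 * Illustrative usage, with a hypothetical variable name:
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 */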

#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif
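
/*
 * For reference, ATOMIC64_DECL_ONE(add_cx8) expands (without
 * ATOMIC64_EXPORT defined) to:
 *
 *	void atomic64_add_cx8(atomic64_t *, ...);
 *
 * The variadic prototype keeps the compiler from assuming a normal C
 * calling convention for these out-of-line assembly helpers.
 */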

#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
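
/*
 * In sketch form: with CONFIG_X86_CMPXCHG64, alternative_atomic64(read, ...)
 * emits a direct "call atomic64_read_cx8". Without it, alternative_call()
 * patches the call site at boot: CPUs with X86_FEATURE_CX8 get the _cx8
 * helper, older CPUs keep the _386 fallback.
 */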

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
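
/*
 * A minimal usage sketch (all names here are hypothetical): claim a token
 * exactly once by moving it from 0 to 1; the caller that observes the old
 * value 0 is the one that won the race.
 *
 *	static atomic64_t token = ATOMIC64_INIT(0);
 *
 *	if (arch_atomic64_cmpxchg(&token, 0, 1) == 0)
 *		do_first_time_setup();
 */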

/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
#define arch_atomic64_xchg arch_atomic64_xchg

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}
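
/*
 * Note: a plain 64-bit store compiles to two 32-bit stores on 32-bit x86,
 * so an open-coded "v->counter = i" could be observed half-written by a
 * concurrent reader; hence the out-of-line helper even for a simple set.
 */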

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_add_return arch_atomic64_add_return

/*
 * Other variants with different arithmetic operators:
 */
static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless

static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
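
/*
 * A sketch of the classic use for inc_not_zero (names are hypothetical):
 * take a reference only while the object is still alive.
 *
 *	if (!arch_atomic64_inc_not_zero(&obj->refs))
 *		return NULL;	// refcount already hit zero
 */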

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#undef alternative_atomic64
#undef __alternative_atomic64

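/*
 * Everything below is built from the same cmpxchg loop: read a guess of the
 * current value, compute the new value from it, and retry until cmpxchg
 * confirms that nobody changed the counter in between. The guess is seeded
 * with c = 0; a mismatch simply refreshes c with the observed value, so
 * correctness does not depend on the initial guess.
 */
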
static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))

#endif /* _ASM_X86_ATOMIC64_32_H */