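/*
 * Out-of-line refcount_t functions.
 */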
#include <linux/mutex.h>
#include <linux/refcount.h>
#include <linux/spinlock.h>
#include <linux/bug.h>

#define REFCOUNT_WARN(str) WARN_ONCE(1, "refcount_t: " str ".\n")
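
/**
 * refcount_warn_saturate - report a refcount_t saturation event
 * @r: the refcount that triggered the event
 * @t: the type of event
 *
 * Pins the counter at REFCOUNT_SATURATED and emits a one-time warning
 * describing the event: an overflow leaks the object, while an increment
 * on zero or an underflow indicates a use-after-free.
 */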
void refcount_warn_saturate(refcount_t *r, enum refcount_saturation_type t)
{
	refcount_set(r, REFCOUNT_SATURATED);

	switch (t) {
	case REFCOUNT_ADD_NOT_ZERO_OVF:
		REFCOUNT_WARN("saturated; leaking memory");
		break;
	case REFCOUNT_ADD_OVF:
		REFCOUNT_WARN("saturated; leaking memory");
		break;
	case REFCOUNT_ADD_UAF:
		REFCOUNT_WARN("addition on 0; use-after-free");
		break;
	case REFCOUNT_SUB_UAF:
		REFCOUNT_WARN("underflow; use-after-free");
		break;
	case REFCOUNT_DEC_LEAK:
		REFCOUNT_WARN("decrement hit 0; leaking memory");
		break;
	default:
		REFCOUNT_WARN("unknown saturation event!?");
	}
}
EXPORT_SYMBOL(refcount_warn_saturate);
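
/**
 * refcount_dec_if_one - decrement a refcount if it is 1
 * @r: the refcount
 *
 * Attempts the 1 -> 0 transition with a single release cmpxchg and reports
 * whether it succeeded; a count other than 1 is left untouched.
 *
 * Return: true if the resulting refcount is 0, false otherwise
 */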
bool refcount_dec_if_one(refcount_t *r)
{
	int val = 1;

	return atomic_try_cmpxchg_release(&r->refs, &val, 0);
}
EXPORT_SYMBOL(refcount_dec_if_one);
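
/**
 * refcount_dec_not_one - decrement a refcount if it is not 1
 * @r: the refcount
 *
 * Decrements the refcount with release ordering unless that would drop it
 * to 0, leaving the final 1 -> 0 transition to a later, lock-protected
 * refcount_dec_and_test(). A saturated counter is left alone, and an
 * attempt to decrement a counter that is already 0 warns about the
 * underflow; both of those cases report success.
 *
 * Return: true unless the refcount is 1 and was therefore left untouched
 */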
bool refcount_dec_not_one(refcount_t *r)
{
	unsigned int new, val = atomic_read(&r->refs);

	do {
		if (unlikely(val == REFCOUNT_SATURATED))
			return true;

		if (val == 1)
			return false;

		new = val - 1;
		if (new > val) {
			WARN_ONCE(new > val, "refcount_t: underflow; use-after-free.\n");
			return true;
		}

	} while (!atomic_try_cmpxchg_release(&r->refs, &val, new));

	return true;
}
EXPORT_SYMBOL(refcount_dec_not_one);
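
/**
 * refcount_dec_and_mutex_lock - return holding mutex if able to decrement
 *                               refcount to 0
 * @r: the refcount
 * @lock: the mutex to be locked
 *
 * The fast path simply drops a reference via refcount_dec_not_one(); only
 * when the count is about to hit 0 is @lock taken, so that the final
 * decrement and the caller's teardown are serialized against anything else
 * holding the mutex.
 *
 * Return: true and hold @lock if able to decrement refcount to 0, false
 * otherwise (in which case the mutex is not held)
 */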
bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock)
{
	if (refcount_dec_not_one(r))
		return false;

	mutex_lock(lock);
	if (!refcount_dec_and_test(r)) {
		mutex_unlock(lock);
		return false;
	}

	return true;
}
EXPORT_SYMBOL(refcount_dec_and_mutex_lock);
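
/**
 * refcount_dec_and_lock - return holding spinlock if able to decrement
 *                         refcount to 0
 * @r: the refcount
 * @lock: the spinlock to be locked
 *
 * Spinlock variant of refcount_dec_and_mutex_lock(): the lock is only taken
 * for the final 1 -> 0 transition. Typical use is a put() path where the
 * object sits on a list protected by @lock, so the caller can unlink and
 * free it once this returns true.
 *
 * Return: true and hold @lock if able to decrement refcount to 0, false
 * otherwise (in which case the spinlock is not held)
 */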
bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock)
{
	if (refcount_dec_not_one(r))
		return false;

	spin_lock(lock);
	if (!refcount_dec_and_test(r)) {
		spin_unlock(lock);
		return false;
	}

	return true;
}
EXPORT_SYMBOL(refcount_dec_and_lock);
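
/**
 * refcount_dec_and_lock_irqsave - return holding spinlock with disabled
 *                                 interrupts if able to decrement refcount to 0
 * @r: the refcount
 * @lock: the spinlock to be locked
 * @flags: saved IRQ-flags if the lock is acquired
 *
 * Same as refcount_dec_and_lock() except the spinlock is taken with
 * spin_lock_irqsave(); on success the caller must drop it again with
 * spin_unlock_irqrestore(lock, *flags).
 *
 * Return: true and hold @lock if able to decrement refcount to 0, false
 * otherwise (in which case the spinlock is not held)
 */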
bool refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock,
				   unsigned long *flags)
{
	if (refcount_dec_not_one(r))
		return false;

	spin_lock_irqsave(lock, *flags);
	if (!refcount_dec_and_test(r)) {
		spin_unlock_irqrestore(lock, *flags);
		return false;
	}

	return true;
}
EXPORT_SYMBOL(refcount_dec_and_lock_irqsave);