#ifndef __LINUX_NODEMASK_H
#define __LINUX_NODEMASK_H
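
/*
 * Nodemasks provide a bitmap suitable for representing the set of
 * nodes in a system, one bit position per node number; they are built
 * on the bitmap operations from <linux/bitmap.h>.
 *
 * Illustrative usage (a sketch, not part of this header):
 *
 *	nodemask_t mask = NODE_MASK_NONE;
 *	int node;
 *
 *	node_set(1, mask);
 *	for_each_node_mask(node, mask)
 *		pr_info("node %d is in the mask\n", node);
 */
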
#include <linux/threads.h>
#include <linux/bitmap.h>
#include <linux/minmax.h>
#include <linux/numa.h>
#include <linux/random.h>

typedef struct { DECLARE_BITMAP(bits, MAX_NUMNODES); } nodemask_t;
extern nodemask_t _unused_nodemask_arg_;
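
/**
 * nodemask_pr_args - printf args to output a nodemask
 * @maskp: nodemask to be printed
 *
 * Expands to the (width, bits) pair expected by the '%*pb' and '%*pbl'
 * printk format specifiers.  Illustrative sketch (not part of this
 * header):
 *
 *	pr_info("nodes: %*pbl\n", nodemask_pr_args(&mask));
 */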
#define nodemask_pr_args(maskp)	__nodemask_pr_numnodes(maskp), \
				__nodemask_pr_bits(maskp)
static inline unsigned int __nodemask_pr_numnodes(const nodemask_t *m)
{
        return m ? MAX_NUMNODES : 0;
}
static inline const unsigned long *__nodemask_pr_bits(const nodemask_t *m)
{
        return m ? m->bits : NULL;
}
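
/*
 * The accessors below come in pairs: node_set(node, mask) and friends
 * take a nodemask_t lvalue, while the __node_set()-style helpers they
 * expand to take a pointer plus an explicit bit count (MAX_NUMNODES).
 */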
#define node_set(node, dst) __node_set((node), &(dst))
static __always_inline void __node_set(int node, volatile nodemask_t *dstp)
{
        set_bit(node, dstp->bits);
}

#define node_clear(node, dst) __node_clear((node), &(dst))
static inline void __node_clear(int node, volatile nodemask_t *dstp)
{
        clear_bit(node, dstp->bits);
}

#define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES)
static inline void __nodes_setall(nodemask_t *dstp, unsigned int nbits)
{
        bitmap_fill(dstp->bits, nbits);
}

#define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES)
static inline void __nodes_clear(nodemask_t *dstp, unsigned int nbits)
{
        bitmap_zero(dstp->bits, nbits);
}

#define node_isset(node, nodemask) test_bit((node), (nodemask).bits)

#define node_test_and_set(node, nodemask) \
                __node_test_and_set((node), &(nodemask))
static inline bool __node_test_and_set(int node, nodemask_t *addr)
{
        return test_and_set_bit(node, addr->bits);
}

#define nodes_and(dst, src1, src2) \
                __nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES)
static inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define nodes_or(dst, src1, src2) \
                __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES)
static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define nodes_xor(dst, src1, src2) \
                __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES)
static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define nodes_andnot(dst, src1, src2) \
                __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES)
static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits);
}

#define nodes_complement(dst, src) \
                __nodes_complement(&(dst), &(src), MAX_NUMNODES)
static inline void __nodes_complement(nodemask_t *dstp,
                const nodemask_t *srcp, unsigned int nbits)
{
        bitmap_complement(dstp->bits, srcp->bits, nbits);
}

#define nodes_equal(src1, src2) \
                __nodes_equal(&(src1), &(src2), MAX_NUMNODES)
static inline bool __nodes_equal(const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        return bitmap_equal(src1p->bits, src2p->bits, nbits);
}

#define nodes_intersects(src1, src2) \
                __nodes_intersects(&(src1), &(src2), MAX_NUMNODES)
static inline bool __nodes_intersects(const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        return bitmap_intersects(src1p->bits, src2p->bits, nbits);
}

#define nodes_subset(src1, src2) \
                __nodes_subset(&(src1), &(src2), MAX_NUMNODES)
static inline bool __nodes_subset(const nodemask_t *src1p,
                const nodemask_t *src2p, unsigned int nbits)
{
        return bitmap_subset(src1p->bits, src2p->bits, nbits);
}

#define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES)
static inline bool __nodes_empty(const nodemask_t *srcp, unsigned int nbits)
{
        return bitmap_empty(srcp->bits, nbits);
}

#define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES)
static inline bool __nodes_full(const nodemask_t *srcp, unsigned int nbits)
{
        return bitmap_full(srcp->bits, nbits);
}

#define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES)
static inline int __nodes_weight(const nodemask_t *srcp, unsigned int nbits)
{
        return bitmap_weight(srcp->bits, nbits);
}
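
/*
 * Illustrative sketch of the set operations above (not part of this
 * header): with src1 = {0,1} and src2 = {1,2},
 *
 *	nodes_and(dst, src1, src2);	dst == {1}
 *	nodes_or(dst, src1, src2);	dst == {0,1,2}
 *	nodes_andnot(dst, src1, src2);	dst == {0}
 *	nodes_weight(src2);		returns 2
 */
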
#define nodes_shift_right(dst, src, n) \
                __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES)
static inline void __nodes_shift_right(nodemask_t *dstp,
                const nodemask_t *srcp, int n, int nbits)
{
        bitmap_shift_right(dstp->bits, srcp->bits, n, nbits);
}

#define nodes_shift_left(dst, src, n) \
                __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES)
static inline void __nodes_shift_left(nodemask_t *dstp,
                const nodemask_t *srcp, int n, int nbits)
{
        bitmap_shift_left(dstp->bits, srcp->bits, n, nbits);
}
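
/*
 * first_node()/next_node() are clamped (via min_t()) so they never
 * return a value greater than MAX_NUMNODES; MAX_NUMNODES itself means
 * "no (further) node set" and terminates for_each_node_mask().
 */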
#define first_node(src) __first_node(&(src))
static inline unsigned int __first_node(const nodemask_t *srcp)
{
        return min_t(unsigned int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES));
}

#define next_node(n, src) __next_node((n), &(src))
static inline unsigned int __next_node(int n, const nodemask_t *srcp)
{
        return min_t(unsigned int, MAX_NUMNODES, find_next_bit(srcp->bits, MAX_NUMNODES, n+1));
}
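
/*
 * next_node_in(): return the next set node in @src after @n, wrapping
 * around to the first set node; returns MAX_NUMNODES if @src is empty.
 * Illustrative round-robin use (a sketch, not part of this header):
 *
 *	next = next_node_in(prev, node_states[N_ONLINE]);
 */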
#define next_node_in(n, src) __next_node_in((n), &(src))
static inline unsigned int __next_node_in(int node, const nodemask_t *srcp)
{
        unsigned int ret = __next_node(node, srcp);

        if (ret == MAX_NUMNODES)
                ret = __first_node(srcp);
        return ret;
}

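/*
 * init_nodemask_of_node()/nodemask_of_node() produce a mask with only
 * @node set.  Illustrative sketch (not part of this header):
 *
 *	nodemask_t single = nodemask_of_node(nid);
 */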
static inline void init_nodemask_of_node(nodemask_t *mask, int node)
{
        nodes_clear(*mask);
        node_set(node, *mask);
}

#define nodemask_of_node(node) \
({ \
        typeof(_unused_nodemask_arg_) m; \
        if (sizeof(m) == sizeof(unsigned long)) { \
                m.bits[0] = 1UL << (node); \
        } else { \
                init_nodemask_of_node(&m, (node)); \
        } \
        m; \
})

#define first_unset_node(mask) __first_unset_node(&(mask))
static inline unsigned int __first_unset_node(const nodemask_t *maskp)
{
        return min_t(unsigned int, MAX_NUMNODES,
                        find_first_zero_bit(maskp->bits, MAX_NUMNODES));
}

#define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES)

#if MAX_NUMNODES <= BITS_PER_LONG

#define NODE_MASK_ALL \
((nodemask_t) { { \
        [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \
} })

#else

#define NODE_MASK_ALL \
((nodemask_t) { { \
        [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL, \
        [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD \
} })

#endif

#define NODE_MASK_NONE \
((nodemask_t) { { \
        [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] = 0UL \
} })

#define nodes_addr(src) ((src).bits)

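/*
 * Parse a nodemask from a user buffer (hex bitmap format, as printed by
 * '%*pb') or from a kernel string in list format ("0,2-3").
 * Illustrative sketch (not part of this header):
 *
 *	nodemask_t mask;
 *
 *	if (nodelist_parse("0-2", mask))
 *		return -EINVAL;
 */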
#define nodemask_parse_user(ubuf, ulen, dst) \
                __nodemask_parse_user((ubuf), (ulen), &(dst), MAX_NUMNODES)
static inline int __nodemask_parse_user(const char __user *buf, int len,
                nodemask_t *dstp, int nbits)
{
        return bitmap_parse_user(buf, len, dstp->bits, nbits);
}

#define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES)
static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits)
{
        return bitmap_parselist(buf, dstp->bits, nbits);
}

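/*
 * Remapping helpers, thin wrappers around the bitmap_*() equivalents.
 * Worked instance for node_remap() (illustrative, based on the
 * bitmap_bitremap() semantics): with old = {1,3} and new = {5,7},
 * node_remap(3, old, new) == 7, since bit 3 is the second set bit of
 * @old and therefore maps to the second set bit of @new.
 */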
#define node_remap(oldbit, old, new) \
                __node_remap((oldbit), &(old), &(new), MAX_NUMNODES)
static inline int __node_remap(int oldbit,
                const nodemask_t *oldp, const nodemask_t *newp, int nbits)
{
        return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits);
}

#define nodes_remap(dst, src, old, new) \
                __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES)
static inline void __nodes_remap(nodemask_t *dstp, const nodemask_t *srcp,
                const nodemask_t *oldp, const nodemask_t *newp, int nbits)
{
        bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits);
}

#define nodes_onto(dst, orig, relmap) \
                __nodes_onto(&(dst), &(orig), &(relmap), MAX_NUMNODES)
static inline void __nodes_onto(nodemask_t *dstp, const nodemask_t *origp,
                const nodemask_t *relmapp, int nbits)
{
        bitmap_onto(dstp->bits, origp->bits, relmapp->bits, nbits);
}

#define nodes_fold(dst, orig, sz) \
                __nodes_fold(&(dst), &(orig), sz, MAX_NUMNODES)
static inline void __nodes_fold(nodemask_t *dstp, const nodemask_t *origp,
                int sz, int nbits)
{
        bitmap_fold(dstp->bits, origp->bits, sz, nbits);
}

#if MAX_NUMNODES > 1
#define for_each_node_mask(node, mask) \
        for ((node) = first_node(mask); \
             (node >= 0) && (node) < MAX_NUMNODES; \
             (node) = next_node((node), (mask)))
#else
#define for_each_node_mask(node, mask) \
        for ((node) = 0; (node) < 1 && !nodes_empty(mask); (node)++)
#endif
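
/*
 * Per-node state bits; a node may be in several of these states at
 * once (e.g. online and having memory).
 */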
enum node_states {
        N_POSSIBLE,             /* The node could become online at some point */
        N_ONLINE,               /* The node is online */
        N_NORMAL_MEMORY,        /* The node has regular memory */
#ifdef CONFIG_HIGHMEM
        N_HIGH_MEMORY,          /* The node has regular or high memory */
#else
        N_HIGH_MEMORY = N_NORMAL_MEMORY,
#endif
        N_MEMORY,               /* The node has memory (regular, high, movable) */
        N_CPU,                  /* The node has one or more CPUs */
        N_GENERIC_INITIATOR,    /* The node has one or more Generic Initiators */
        NR_NODE_STATES
};
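
/*
 * node_states[] holds one nodemask per enum node_states entry; the
 * helpers below query and update those masks, e.g. (illustrative):
 *
 *	if (node_state(nid, N_MEMORY))
 *		...
 */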
extern nodemask_t node_states[NR_NODE_STATES];

#if MAX_NUMNODES > 1
static inline int node_state(int node, enum node_states state)
{
        return node_isset(node, node_states[state]);
}

static inline void node_set_state(int node, enum node_states state)
{
        __node_set(node, &node_states[state]);
}

static inline void node_clear_state(int node, enum node_states state)
{
        __node_clear(node, &node_states[state]);
}

static inline int num_node_state(enum node_states state)
{
        return nodes_weight(node_states[state]);
}

#define for_each_node_state(__node, __state) \
        for_each_node_mask((__node), node_states[__state])

#define first_online_node      first_node(node_states[N_ONLINE])
#define first_memory_node      first_node(node_states[N_MEMORY])
static inline unsigned int next_online_node(int nid)
{
        return next_node(nid, node_states[N_ONLINE]);
}
static inline unsigned int next_memory_node(int nid)
{
        return next_node(nid, node_states[N_MEMORY]);
}

extern unsigned int nr_node_ids;
extern unsigned int nr_online_nodes;

static inline void node_set_online(int nid)
{
        node_set_state(nid, N_ONLINE);
        nr_online_nodes = num_node_state(N_ONLINE);
}

static inline void node_set_offline(int nid)
{
        node_clear_state(nid, N_ONLINE);
        nr_online_nodes = num_node_state(N_ONLINE);
}

#else

static inline int node_state(int node, enum node_states state)
{
        return node == 0;
}

static inline void node_set_state(int node, enum node_states state)
{
}

static inline void node_clear_state(int node, enum node_states state)
{
}

static inline int num_node_state(enum node_states state)
{
        return 1;
}

#define for_each_node_state(node, __state) \
        for ( (node) = 0; (node) == 0; (node) = 1)

#define first_online_node      0
#define first_memory_node      0
#define next_online_node(nid)  (MAX_NUMNODES)
#define nr_node_ids            1U
#define nr_online_nodes        1U

#define node_set_online(node)  node_set_state((node), N_ONLINE)
#define node_set_offline(node) node_clear_state((node), N_ONLINE)

#endif

/*
 * Return a random node set in @maskp, or NUMA_NO_NODE if the mask is
 * empty; always 0 on !CONFIG_NUMA or single-node configurations.
 */
static inline int node_random(const nodemask_t *maskp)
{
#if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
        int w, bit = NUMA_NO_NODE;

        w = nodes_weight(*maskp);
        if (w)
                bit = bitmap_ord_to_pos(maskp->bits,
                        get_random_int() % w, MAX_NUMNODES);
        return bit;
#else
        return 0;
#endif
}

#define node_online_map        node_states[N_ONLINE]
#define node_possible_map      node_states[N_POSSIBLE]

#define num_online_nodes()     num_node_state(N_ONLINE)
#define num_possible_nodes()   num_node_state(N_POSSIBLE)
#define node_online(node)      node_state((node), N_ONLINE)
#define node_possible(node)    node_state((node), N_POSSIBLE)

#define for_each_node(node)        for_each_node_state(node, N_POSSIBLE)
#define for_each_online_node(node) for_each_node_state(node, N_ONLINE)
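
/*
 * Illustrative sketch (not part of this header):
 *
 *	int nid;
 *
 *	for_each_online_node(nid)
 *		if (node_state(nid, N_CPU))
 *			pr_info("node %d is online and has CPUs\n", nid);
 */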
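
/*
 * Scratch-area helpers: with a large NODES_SHIFT a nodemask_t is
 * considered too big for the stack, so NODEMASK_ALLOC() falls back to
 * kmalloc(); pair it with NODEMASK_FREE() and check for NULL.
 * Illustrative sketch (not part of this header):
 *
 *	NODEMASK_ALLOC(nodemask_t, mask, GFP_KERNEL);
 *
 *	if (!mask)
 *		return -ENOMEM;
 *	nodes_clear(*mask);
 *	...
 *	NODEMASK_FREE(mask);
 */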
#if NODES_SHIFT > 8 /* nodemask_t > 32 bytes */
#define NODEMASK_ALLOC(type, name, gfp_flags) \
                type *name = kmalloc(sizeof(*name), gfp_flags)
#define NODEMASK_FREE(m)        kfree(m)
#else
#define NODEMASK_ALLOC(type, name, gfp_flags) type _##name, *name = &_##name
#define NODEMASK_FREE(m)        do {} while (0)
#endif

/* Example structure for using NODEMASK_ALLOC(), as used in mempolicy. */
struct nodemask_scratch {
        nodemask_t      mask1;
        nodemask_t      mask2;
};

#define NODEMASK_SCRATCH(x) \
                NODEMASK_ALLOC(struct nodemask_scratch, x, \
                               GFP_KERNEL | __GFP_NORETRY)
#define NODEMASK_SCRATCH_FREE(x) NODEMASK_FREE(x)

#endif /* __LINUX_NODEMASK_H */