Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0 */
0002 #ifndef __LINUX_NODEMASK_H
0003 #define __LINUX_NODEMASK_H
0004 
0005 /*
0006  * Nodemasks provide a bitmap suitable for representing the
0007  * set of Node's in a system, one bit position per Node number.
0008  *
0009  * See detailed comments in the file linux/bitmap.h describing the
0010  * data type on which these nodemasks are based.
0011  *
0012  * For details of nodemask_parse_user(), see bitmap_parse_user() in
0013  * lib/bitmap.c.  For details of nodelist_parse(), see bitmap_parselist(),
0014  * also in bitmap.c.  For details of node_remap(), see bitmap_bitremap in
0015  * lib/bitmap.c.  For details of nodes_remap(), see bitmap_remap in
0016  * lib/bitmap.c.  For details of nodes_onto(), see bitmap_onto in
0017  * lib/bitmap.c.  For details of nodes_fold(), see bitmap_fold in
0018  * lib/bitmap.c.
0019  *
0020  * The available nodemask operations are:
0021  *
0022  * void node_set(node, mask)        turn on bit 'node' in mask
0023  * void node_clear(node, mask)      turn off bit 'node' in mask
0024  * void nodes_setall(mask)      set all bits
0025  * void nodes_clear(mask)       clear all bits
0026  * int node_isset(node, mask)       true iff bit 'node' set in mask
0027  * int node_test_and_set(node, mask)    test and set bit 'node' in mask
0028  *
0029  * void nodes_and(dst, src1, src2)  dst = src1 & src2  [intersection]
0030  * void nodes_or(dst, src1, src2)   dst = src1 | src2  [union]
0031  * void nodes_xor(dst, src1, src2)  dst = src1 ^ src2
0032  * void nodes_andnot(dst, src1, src2)   dst = src1 & ~src2
0033  * void nodes_complement(dst, src)  dst = ~src
0034  *
0035  * int nodes_equal(mask1, mask2)    Does mask1 == mask2?
0036  * int nodes_intersects(mask1, mask2)   Do mask1 and mask2 intersect?
0037  * int nodes_subset(mask1, mask2)   Is mask1 a subset of mask2?
 * int nodes_empty(mask)        Is mask empty (no bits set)?
 * int nodes_full(mask)         Is mask full (all bits set)?
0040  * int nodes_weight(mask)       Hamming weight - number of set bits
0041  *
0042  * void nodes_shift_right(dst, src, n)  Shift right
0043  * void nodes_shift_left(dst, src, n)   Shift left
0044  *
0045  * unsigned int first_node(mask)    Number lowest set bit, or MAX_NUMNODES
 * unsigned int next_node(node, mask)   Next node past 'node', or MAX_NUMNODES
0047  * unsigned int next_node_in(node, mask) Next node past 'node', or wrap to first,
0048  *                  or MAX_NUMNODES
0049  * unsigned int first_unset_node(mask)  First node not set in mask, or
0050  *                  MAX_NUMNODES
0051  *
0052  * nodemask_t nodemask_of_node(node)    Return nodemask with bit 'node' set
0053  * NODE_MASK_ALL            Initializer - all bits set
0054  * NODE_MASK_NONE           Initializer - no bits set
0055  * unsigned long *nodes_addr(mask)  Array of unsigned long's in mask
0056  *
0057  * int nodemask_parse_user(ubuf, ulen, mask)    Parse ascii string as nodemask
0058  * int nodelist_parse(buf, map)     Parse ascii string as nodelist
0059  * int node_remap(oldbit, old, new) newbit = map(old, new)(oldbit)
0060  * void nodes_remap(dst, src, old, new) *dst = map(old, new)(src)
0061  * void nodes_onto(dst, orig, relmap)   *dst = orig relative to relmap
0062  * void nodes_fold(dst, orig, sz)   dst bits = orig bits mod sz
0063  *
0064  * for_each_node_mask(node, mask)   for-loop node over mask
0065  *
0066  * int num_online_nodes()       Number of online Nodes
0067  * int num_possible_nodes()     Number of all possible Nodes
0068  *
0069  * int node_random(mask)        Random node with set bit in mask
0070  *
0071  * int node_online(node)        Is some node online?
0072  * int node_possible(node)      Is some node possible?
0073  *
0074  * node_set_online(node)        set bit 'node' in node_online_map
0075  * node_set_offline(node)       clear bit 'node' in node_online_map
0076  *
0077  * for_each_node(node)          for-loop node over node_possible_map
0078  * for_each_online_node(node)       for-loop node over node_online_map
0079  *
0080  * Subtlety:
0081  * 1) The 'type-checked' form of node_isset() causes gcc (3.3.2, anyway)
0082  *    to generate slightly worse code.  So use a simple one-line #define
0083  *    for node_isset(), instead of wrapping an inline inside a macro, the
0084  *    way we do the other calls.
0085  *
0086  * NODEMASK_SCRATCH
0087  * When doing above logical AND, OR, XOR, Remap operations the callers tend to
0088  * need temporary nodemask_t's on the stack. But if NODES_SHIFT is large,
0089  * nodemask_t's consume too much stack space.  NODEMASK_SCRATCH is a helper
0090  * for such situations. See below and CPUMASK_ALLOC also.
0091  */
0092 
0093 #include <linux/threads.h>
0094 #include <linux/bitmap.h>
0095 #include <linux/minmax.h>
0096 #include <linux/numa.h>
0097 #include <linux/random.h>
0098 
0099 typedef struct { DECLARE_BITMAP(bits, MAX_NUMNODES); } nodemask_t;
0100 extern nodemask_t _unused_nodemask_arg_;
0101 
0102 /**
0103  * nodemask_pr_args - printf args to output a nodemask
0104  * @maskp: nodemask to be printed
0105  *
0106  * Can be used to provide arguments for '%*pb[l]' when printing a nodemask.
0107  */
0108 #define nodemask_pr_args(maskp) __nodemask_pr_numnodes(maskp), \
0109                 __nodemask_pr_bits(maskp)
0110 static inline unsigned int __nodemask_pr_numnodes(const nodemask_t *m)
0111 {
0112     return m ? MAX_NUMNODES : 0;
0113 }
0114 static inline const unsigned long *__nodemask_pr_bits(const nodemask_t *m)
0115 {
0116     return m ? m->bits : NULL;
0117 }
0118 
0119 /*
0120  * The inline keyword gives the compiler room to decide to inline, or
0121  * not inline a function as it sees best.  However, as these functions
0122  * are called in both __init and non-__init functions, if they are not
0123  * inlined we will end up with a section mismatch error (of the type of
0124  * freeable items not being freed).  So we must use __always_inline here
0125  * to fix the problem.  If other functions in the future also end up in
0126  * this situation they will also need to be annotated as __always_inline
0127  */
0128 #define node_set(node, dst) __node_set((node), &(dst))
0129 static __always_inline void __node_set(int node, volatile nodemask_t *dstp)
0130 {
0131     set_bit(node, dstp->bits);
0132 }
0133 
0134 #define node_clear(node, dst) __node_clear((node), &(dst))
0135 static inline void __node_clear(int node, volatile nodemask_t *dstp)
0136 {
0137     clear_bit(node, dstp->bits);
0138 }
0139 
0140 #define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES)
0141 static inline void __nodes_setall(nodemask_t *dstp, unsigned int nbits)
0142 {
0143     bitmap_fill(dstp->bits, nbits);
0144 }
0145 
0146 #define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES)
0147 static inline void __nodes_clear(nodemask_t *dstp, unsigned int nbits)
0148 {
0149     bitmap_zero(dstp->bits, nbits);
0150 }
0151 
/* No static inline type checking - see Subtlety (1) above. */
/* Returns true iff bit 'node' is set in 'nodemask'. */
#define node_isset(node, nodemask) test_bit((node), (nodemask).bits)
0154 
0155 #define node_test_and_set(node, nodemask) \
0156             __node_test_and_set((node), &(nodemask))
0157 static inline bool __node_test_and_set(int node, nodemask_t *addr)
0158 {
0159     return test_and_set_bit(node, addr->bits);
0160 }
0161 
0162 #define nodes_and(dst, src1, src2) \
0163             __nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES)
0164 static inline void __nodes_and(nodemask_t *dstp, const nodemask_t *src1p,
0165                     const nodemask_t *src2p, unsigned int nbits)
0166 {
0167     bitmap_and(dstp->bits, src1p->bits, src2p->bits, nbits);
0168 }
0169 
0170 #define nodes_or(dst, src1, src2) \
0171             __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES)
0172 static inline void __nodes_or(nodemask_t *dstp, const nodemask_t *src1p,
0173                     const nodemask_t *src2p, unsigned int nbits)
0174 {
0175     bitmap_or(dstp->bits, src1p->bits, src2p->bits, nbits);
0176 }
0177 
0178 #define nodes_xor(dst, src1, src2) \
0179             __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES)
0180 static inline void __nodes_xor(nodemask_t *dstp, const nodemask_t *src1p,
0181                     const nodemask_t *src2p, unsigned int nbits)
0182 {
0183     bitmap_xor(dstp->bits, src1p->bits, src2p->bits, nbits);
0184 }
0185 
0186 #define nodes_andnot(dst, src1, src2) \
0187             __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES)
0188 static inline void __nodes_andnot(nodemask_t *dstp, const nodemask_t *src1p,
0189                     const nodemask_t *src2p, unsigned int nbits)
0190 {
0191     bitmap_andnot(dstp->bits, src1p->bits, src2p->bits, nbits);
0192 }
0193 
0194 #define nodes_complement(dst, src) \
0195             __nodes_complement(&(dst), &(src), MAX_NUMNODES)
0196 static inline void __nodes_complement(nodemask_t *dstp,
0197                     const nodemask_t *srcp, unsigned int nbits)
0198 {
0199     bitmap_complement(dstp->bits, srcp->bits, nbits);
0200 }
0201 
0202 #define nodes_equal(src1, src2) \
0203             __nodes_equal(&(src1), &(src2), MAX_NUMNODES)
0204 static inline bool __nodes_equal(const nodemask_t *src1p,
0205                     const nodemask_t *src2p, unsigned int nbits)
0206 {
0207     return bitmap_equal(src1p->bits, src2p->bits, nbits);
0208 }
0209 
0210 #define nodes_intersects(src1, src2) \
0211             __nodes_intersects(&(src1), &(src2), MAX_NUMNODES)
0212 static inline bool __nodes_intersects(const nodemask_t *src1p,
0213                     const nodemask_t *src2p, unsigned int nbits)
0214 {
0215     return bitmap_intersects(src1p->bits, src2p->bits, nbits);
0216 }
0217 
0218 #define nodes_subset(src1, src2) \
0219             __nodes_subset(&(src1), &(src2), MAX_NUMNODES)
0220 static inline bool __nodes_subset(const nodemask_t *src1p,
0221                     const nodemask_t *src2p, unsigned int nbits)
0222 {
0223     return bitmap_subset(src1p->bits, src2p->bits, nbits);
0224 }
0225 
0226 #define nodes_empty(src) __nodes_empty(&(src), MAX_NUMNODES)
0227 static inline bool __nodes_empty(const nodemask_t *srcp, unsigned int nbits)
0228 {
0229     return bitmap_empty(srcp->bits, nbits);
0230 }
0231 
0232 #define nodes_full(nodemask) __nodes_full(&(nodemask), MAX_NUMNODES)
0233 static inline bool __nodes_full(const nodemask_t *srcp, unsigned int nbits)
0234 {
0235     return bitmap_full(srcp->bits, nbits);
0236 }
0237 
0238 #define nodes_weight(nodemask) __nodes_weight(&(nodemask), MAX_NUMNODES)
0239 static inline int __nodes_weight(const nodemask_t *srcp, unsigned int nbits)
0240 {
0241     return bitmap_weight(srcp->bits, nbits);
0242 }
0243 
0244 #define nodes_shift_right(dst, src, n) \
0245             __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES)
0246 static inline void __nodes_shift_right(nodemask_t *dstp,
0247                     const nodemask_t *srcp, int n, int nbits)
0248 {
0249     bitmap_shift_right(dstp->bits, srcp->bits, n, nbits);
0250 }
0251 
0252 #define nodes_shift_left(dst, src, n) \
0253             __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES)
0254 static inline void __nodes_shift_left(nodemask_t *dstp,
0255                     const nodemask_t *srcp, int n, int nbits)
0256 {
0257     bitmap_shift_left(dstp->bits, srcp->bits, n, nbits);
0258 }
0259 
0260 /* FIXME: better would be to fix all architectures to never return
0261           > MAX_NUMNODES, then the silly min_ts could be dropped. */
0262 
0263 #define first_node(src) __first_node(&(src))
0264 static inline unsigned int __first_node(const nodemask_t *srcp)
0265 {
0266     return min_t(unsigned int, MAX_NUMNODES, find_first_bit(srcp->bits, MAX_NUMNODES));
0267 }
0268 
0269 #define next_node(n, src) __next_node((n), &(src))
0270 static inline unsigned int __next_node(int n, const nodemask_t *srcp)
0271 {
0272     return min_t(unsigned int, MAX_NUMNODES, find_next_bit(srcp->bits, MAX_NUMNODES, n+1));
0273 }
0274 
0275 /*
0276  * Find the next present node in src, starting after node n, wrapping around to
0277  * the first node in src if needed.  Returns MAX_NUMNODES if src is empty.
0278  */
0279 #define next_node_in(n, src) __next_node_in((n), &(src))
0280 static inline unsigned int __next_node_in(int node, const nodemask_t *srcp)
0281 {
0282     unsigned int ret = __next_node(node, srcp);
0283 
0284     if (ret == MAX_NUMNODES)
0285         ret = __first_node(srcp);
0286     return ret;
0287 }
0288 
0289 static inline void init_nodemask_of_node(nodemask_t *mask, int node)
0290 {
0291     nodes_clear(*mask);
0292     node_set(node, *mask);
0293 }
0294 
/*
 * Return (as an rvalue) a nodemask with only bit 'node' set.  The
 * single-word case writes the word directly instead of going through
 * init_nodemask_of_node().
 */
#define nodemask_of_node(node)                      \
({                                  \
    typeof(_unused_nodemask_arg_) m;                \
    if (sizeof(m) == sizeof(unsigned long)) {           \
        m.bits[0] = 1UL << (node);              \
    } else {                            \
        init_nodemask_of_node(&m, (node));          \
    }                               \
    m;                              \
})
0305 
0306 #define first_unset_node(mask) __first_unset_node(&(mask))
0307 static inline unsigned int __first_unset_node(const nodemask_t *maskp)
0308 {
0309     return min_t(unsigned int, MAX_NUMNODES,
0310             find_first_zero_bit(maskp->bits, MAX_NUMNODES));
0311 }
0312 
#define NODE_MASK_LAST_WORD BITMAP_LAST_WORD_MASK(MAX_NUMNODES)

#if MAX_NUMNODES <= BITS_PER_LONG

/* Single-word mask: only word needs the (possibly partial) last-word fill. */
#define NODE_MASK_ALL                           \
((nodemask_t) { {                           \
    [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD       \
} })

#else

/* Multi-word mask: all-ones words plus a (possibly partial) final word. */
#define NODE_MASK_ALL                           \
((nodemask_t) { {                           \
    [0 ... BITS_TO_LONGS(MAX_NUMNODES)-2] = ~0UL,           \
    [BITS_TO_LONGS(MAX_NUMNODES)-1] = NODE_MASK_LAST_WORD       \
} })

#endif

/* Initializer with no bits set. */
#define NODE_MASK_NONE                          \
((nodemask_t) { {                           \
    [0 ... BITS_TO_LONGS(MAX_NUMNODES)-1] =  0UL            \
} })
0336 
0337 #define nodes_addr(src) ((src).bits)
0338 
0339 #define nodemask_parse_user(ubuf, ulen, dst) \
0340         __nodemask_parse_user((ubuf), (ulen), &(dst), MAX_NUMNODES)
0341 static inline int __nodemask_parse_user(const char __user *buf, int len,
0342                     nodemask_t *dstp, int nbits)
0343 {
0344     return bitmap_parse_user(buf, len, dstp->bits, nbits);
0345 }
0346 
0347 #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES)
0348 static inline int __nodelist_parse(const char *buf, nodemask_t *dstp, int nbits)
0349 {
0350     return bitmap_parselist(buf, dstp->bits, nbits);
0351 }
0352 
#define node_remap(oldbit, old, new) \
        __node_remap((oldbit), &(old), &(new), MAX_NUMNODES)
/* Map 'oldbit' through the old->new remapping; see bitmap_bitremap(). */
static inline int __node_remap(int oldbit,
        const nodemask_t *oldp, const nodemask_t *newp, int nbits)
{
    return bitmap_bitremap(oldbit, oldp->bits, newp->bits, nbits);
}
0360 
#define nodes_remap(dst, src, old, new) \
        __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES)
/* *dstp = *srcp mapped through the old->new remapping; see bitmap_remap(). */
static inline void __nodes_remap(nodemask_t *dstp, const nodemask_t *srcp,
        const nodemask_t *oldp, const nodemask_t *newp, int nbits)
{
    bitmap_remap(dstp->bits, srcp->bits, oldp->bits, newp->bits, nbits);
}
0368 
#define nodes_onto(dst, orig, relmap) \
        __nodes_onto(&(dst), &(orig), &(relmap), MAX_NUMNODES)
/* *dstp = *origp translated relative to *relmapp; see bitmap_onto(). */
static inline void __nodes_onto(nodemask_t *dstp, const nodemask_t *origp,
        const nodemask_t *relmapp, int nbits)
{
    bitmap_onto(dstp->bits, origp->bits, relmapp->bits, nbits);
}
0376 
#define nodes_fold(dst, orig, sz) \
        __nodes_fold(&(dst), &(orig), sz, MAX_NUMNODES)
/* *dstp bits = *origp bit positions taken modulo sz; see bitmap_fold(). */
static inline void __nodes_fold(nodemask_t *dstp, const nodemask_t *origp,
        int sz, int nbits)
{
    bitmap_fold(dstp->bits, origp->bits, sz, nbits);
}
0384 
#if MAX_NUMNODES > 1
/*
 * Iterate 'node' over every bit set in 'mask'.  The extra (node >= 0)
 * test keeps the loop bound sane when 'node' is a signed type.
 */
#define for_each_node_mask(node, mask)                  \
    for ((node) = first_node(mask);                 \
         (node >= 0) && (node) < MAX_NUMNODES;          \
         (node) = next_node((node), (mask)))
#else /* MAX_NUMNODES == 1 */
/* One possible node: visit node 0 exactly once, iff the mask is non-empty. */
#define for_each_node_mask(node, mask)                                  \
    for ((node) = 0; (node) < 1 && !nodes_empty(mask); (node)++)
#endif /* MAX_NUMNODES */
0394 
0395 /*
0396  * Bitmasks that are kept for all the nodes.
0397  */
enum node_states {
    N_POSSIBLE,     /* The node could become online at some point */
    N_ONLINE,       /* The node is online */
    N_NORMAL_MEMORY,    /* The node has regular memory */
#ifdef CONFIG_HIGHMEM
    N_HIGH_MEMORY,      /* The node has regular or high memory */
#else
    N_HIGH_MEMORY = N_NORMAL_MEMORY,    /* no highmem: alias N_NORMAL_MEMORY */
#endif
    N_MEMORY,       /* The node has memory(regular, high, movable) */
    N_CPU,      /* The node has one or more cpus */
    N_GENERIC_INITIATOR,    /* The node has one or more Generic Initiators */
    NR_NODE_STATES      /* number of distinct node-state masks */
};
0412 
0413 /*
0414  * The following particular system nodemasks and operations
0415  * on them manage all possible and online nodes.
0416  */
0417 
0418 extern nodemask_t node_states[NR_NODE_STATES];
0419 
#if MAX_NUMNODES > 1
/* Nonzero iff bit 'node' is set in the given state mask. */
static inline int node_state(int node, enum node_states state)
{
    return node_isset(node, node_states[state]);
}

/* Set bit 'node' in the given state mask. */
static inline void node_set_state(int node, enum node_states state)
{
    __node_set(node, &node_states[state]);
}

/* Clear bit 'node' in the given state mask. */
static inline void node_clear_state(int node, enum node_states state)
{
    __node_clear(node, &node_states[state]);
}

/* Number of nodes currently in the given state mask. */
static inline int num_node_state(enum node_states state)
{
    return nodes_weight(node_states[state]);
}

#define for_each_node_state(__node, __state) \
    for_each_node_mask((__node), node_states[__state])

#define first_online_node   first_node(node_states[N_ONLINE])
#define first_memory_node   first_node(node_states[N_MEMORY])
/* Next online node after 'nid', or MAX_NUMNODES if none. */
static inline unsigned int next_online_node(int nid)
{
    return next_node(nid, node_states[N_ONLINE]);
}
/* Next node with memory after 'nid', or MAX_NUMNODES if none. */
static inline unsigned int next_memory_node(int nid)
{
    return next_node(nid, node_states[N_MEMORY]);
}

extern unsigned int nr_node_ids;
extern unsigned int nr_online_nodes;

/* Mark 'nid' online and refresh the cached online-node count. */
static inline void node_set_online(int nid)
{
    node_set_state(nid, N_ONLINE);
    nr_online_nodes = num_node_state(N_ONLINE);
}

/* Mark 'nid' offline and refresh the cached online-node count. */
static inline void node_set_offline(int nid)
{
    node_clear_state(nid, N_ONLINE);
    nr_online_nodes = num_node_state(N_ONLINE);
}
0469 
#else

/* Single-node variants: node 0 is the only node and is always present. */
static inline int node_state(int node, enum node_states state)
{
    return node == 0;
}

/* No per-state masks to maintain with one node. */
static inline void node_set_state(int node, enum node_states state)
{
}

static inline void node_clear_state(int node, enum node_states state)
{
}

static inline int num_node_state(enum node_states state)
{
    return 1;
}

/* Visit node 0 exactly once. */
#define for_each_node_state(node, __state) \
    for ( (node) = 0; (node) == 0; (node) = 1)

#define first_online_node   0
#define first_memory_node   0
#define next_online_node(nid)   (MAX_NUMNODES)
#define nr_node_ids     1U
#define nr_online_nodes     1U

#define node_set_online(node)      node_set_state((node), N_ONLINE)
#define node_set_offline(node)     node_clear_state((node), N_ONLINE)

#endif
0503 
/*
 * Pick a random node with a set bit in *maskp; returns NUMA_NO_NODE when
 * the mask is empty.  On !NUMA or single-node configs, always returns 0.
 */
static inline int node_random(const nodemask_t *maskp)
{
#if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
    int w, bit = NUMA_NO_NODE;

    w = nodes_weight(*maskp);
    if (w)
        /* Pick the n-th set bit, n uniform in [0, w). */
        bit = bitmap_ord_to_pos(maskp->bits,
            get_random_int() % w, MAX_NUMNODES);
    return bit;
#else
    return 0;
#endif
}
0518 
/* Convenience aliases and queries over the N_ONLINE/N_POSSIBLE state masks. */
#define node_online_map     node_states[N_ONLINE]
#define node_possible_map   node_states[N_POSSIBLE]

#define num_online_nodes()  num_node_state(N_ONLINE)
#define num_possible_nodes()    num_node_state(N_POSSIBLE)
#define node_online(node)   node_state((node), N_ONLINE)
#define node_possible(node) node_state((node), N_POSSIBLE)

#define for_each_node(node)    for_each_node_state(node, N_POSSIBLE)
#define for_each_online_node(node) for_each_node_state(node, N_ONLINE)
0529 
/*
 * For nodemask scratch area.
 * NODEMASK_ALLOC(type, name) allocates an object with a specified type and
 * name.
 */
#if NODES_SHIFT > 8 /* nodemask_t > 32 bytes */
/* Large nodemask: heap-allocate to keep it off the (limited) stack. */
#define NODEMASK_ALLOC(type, name, gfp_flags)   \
            type *name = kmalloc(sizeof(*name), gfp_flags)
#define NODEMASK_FREE(m)            kfree(m)
#else
/* Small nodemask: a stack-local object is fine; FREE becomes a no-op. */
#define NODEMASK_ALLOC(type, name, gfp_flags)   type _##name, *name = &_##name
#define NODEMASK_FREE(m)            do {} while (0)
#endif

/* Example structure for using NODEMASK_ALLOC, used in mempolicy. */
struct nodemask_scratch {
    nodemask_t  mask1;
    nodemask_t  mask2;
};

#define NODEMASK_SCRATCH(x)                     \
            NODEMASK_ALLOC(struct nodemask_scratch, x,  \
                    GFP_KERNEL | __GFP_NORETRY)
#define NODEMASK_SCRATCH_FREE(x)    NODEMASK_FREE(x)
0554 
0555 
0556 #endif /* __LINUX_NODEMASK_H */