/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 * Copyright (C) 2016 Intel, Matthew Wilcox
 * Copyright (C) 2016 Intel, Ross Zwisler
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/bug.h>
#include <linux/cpu.h>
#include <linux/errno.h>
#include <linux/export.h>
#include <linux/idr.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/kmemleak.h>
#include <linux/percpu.h>
#include <linux/preempt.h>		/* in_interrupt() */
#include <linux/radix-tree.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>
#include <linux/string.h>
/* Number of nodes in fully populated tree of given height */
static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;
/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
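/*
 * For example, in the common 64-bit configuration (RADIX_TREE_MAP_SHIFT == 6),
 * RADIX_TREE_MAX_PATH is DIV_ROUND_UP(64, 6) == 11, so a single insert
 * preloads at most 2 * 11 - 1 == 21 nodes.
 */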
/*
 * The IDR does not have to be as high as the radix tree since it uses
 * signed integers, not unsigned longs.
 */
#define IDR_INDEX_BITS		(8 /* CHAR_BIT */ * sizeof(int) - 1)
#define IDR_MAX_PATH		(DIV_ROUND_UP(IDR_INDEX_BITS, \
						RADIX_TREE_MAP_SHIFT))
#define IDR_PRELOAD_SIZE	(IDR_MAX_PATH * 2 - 1)
/*
 * The IDA is even shorter since it uses a bitmap at the last level.
 */
#define IDA_INDEX_BITS		(8 * sizeof(int) - 1 - ilog2(IDA_BITMAP_BITS))
#define IDA_MAX_PATH		(DIV_ROUND_UP(IDA_INDEX_BITS, \
						RADIX_TREE_MAP_SHIFT))
#define IDA_PRELOAD_SIZE	(IDA_MAX_PATH * 2 - 1)
/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	unsigned nr;
	/* nodes->parent points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
static inline struct radix_tree_node *entry_to_node(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INTERNAL_NODE);
}

static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

#define RADIX_TREE_RETRY	node_to_entry(NULL)
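/*
 * Internal (node) pointers are distinguished from data entries by a tag in
 * the pointer's low bits; kmem_cache-allocated nodes are aligned well enough
 * that those bits are otherwise clear.  RADIX_TREE_RETRY is thus a tagged
 * NULL: a lookup that finds it knows the entry has moved and must restart
 * from the root.
 */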
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline
bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
{
	void __rcu **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline
bool is_sibling_entry(const struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif
static inline unsigned long
get_slot_offset(const struct radix_tree_node *parent, void __rcu **slot)
{
	return parent ? slot - parent->slots : 0;
}
static unsigned int radix_tree_descend(const struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned long index)
{
	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
	void __rcu **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_internal_node(entry)) {
		if (is_sibling_entry(parent, entry)) {
			void __rcu **sibentry;
			sibentry = (void __rcu **) entry_to_node(entry);
			offset = get_slot_offset(parent, sibentry);
			entry = rcu_dereference_raw(*sibentry);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}
static inline gfp_t root_gfp_mask(const struct radix_tree_root *root)
{
	return root->gfp_mask & (__GFP_BITS_MASK & ~GFP_ZONEMASK);
}
static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(const struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}
static inline void root_tag_set(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + ROOT_TAG_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + ROOT_TAG_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= (1 << ROOT_TAG_SHIFT) - 1;
}

static inline int root_tag_get(const struct radix_tree_root *root, unsigned tag)
{
	return (__force int)root->gfp_mask & (1 << (tag + ROOT_TAG_SHIFT));
}

static inline unsigned root_tags_get(const struct radix_tree_root *root)
{
	return (__force unsigned)root->gfp_mask >> ROOT_TAG_SHIFT;
}

static inline bool is_idr(const struct radix_tree_root *root)
{
	return !!(root->gfp_mask & ROOT_IS_IDR);
}
/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(const struct radix_tree_node *node,
							unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

static inline void all_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	bitmap_fill(node->tags[tag], RADIX_TREE_MAP_SIZE);
}
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @node: where to begin the search
 * @tag: the tag index
 * @offset: the bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(struct radix_tree_node *node, unsigned int tag,
			 unsigned long offset)
{
	const unsigned long *addr = node->tags[tag];

	if (offset < RADIX_TREE_MAP_SIZE) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < RADIX_TREE_MAP_SIZE) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return RADIX_TREE_MAP_SIZE;
}
static unsigned int iter_offset(const struct radix_tree_iter *iter)
{
	return (iter->index >> iter_shift(iter)) & RADIX_TREE_MAP_MASK;
}
/*
 * The maximum index which can be stored in a radix tree
 */
static inline unsigned long shift_maxindex(unsigned int shift)
{
	return (RADIX_TREE_MAP_SIZE << shift) - 1;
}

static inline unsigned long node_maxindex(const struct radix_tree_node *node)
{
	return shift_maxindex(node->shift);
}

static unsigned long next_index(unsigned long index,
				const struct radix_tree_node *node,
				unsigned long offset)
{
	return (index & ~node_maxindex(node)) + (offset << node->shift);
}
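/*
 * For instance, with RADIX_TREE_MAP_SIZE == 64 a leaf node (shift 0) covers
 * indices 0-63 (shift_maxindex(0) == 63), its parent (shift 6) covers
 * 0-4095, and so on: each level multiplies the span by RADIX_TREE_MAP_SIZE.
 */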
#ifndef __KERNEL__
static void dump_node(struct radix_tree_node *node, unsigned long index)
{
	unsigned long i;

	pr_debug("radix node: %p offset %d indices %lu-%lu parent %p tags %lx %lx %lx shift %d count %d exceptional %d\n",
		node, node->offset, index, index | node_maxindex(node),
		node->parent,
		node->tags[0][0], node->tags[1][0], node->tags[2][0],
		node->shift, node->count, node->exceptional);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		unsigned long first = index | (i << node->shift);
		unsigned long last = first | ((1UL << node->shift) - 1);
		void *entry = node->slots[i];
		if (!entry)
			continue;
		if (entry == RADIX_TREE_RETRY) {
			pr_debug("radix retry offset %ld indices %lu-%lu parent %p\n",
					i, first, last, node);
		} else if (!radix_tree_is_internal_node(entry)) {
			pr_debug("radix entry %p offset %ld indices %lu-%lu parent %p\n",
					entry, i, first, last, node);
		} else if (is_sibling_entry(node, entry)) {
			pr_debug("radix sblng %p offset %ld indices %lu-%lu parent %p val %p\n",
					entry, i, first, last, node,
					*(void **)entry_to_node(entry));
		} else {
			dump_node(entry_to_node(entry), first);
		}
	}
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p rnode %p tags %x\n",
			root, root->rnode,
			root->gfp_mask >> ROOT_TAG_SHIFT);
	if (!radix_tree_is_internal_node(root->rnode))
		return;
	dump_node(entry_to_node(root->rnode), 0);
}
static void dump_ida_node(void *entry, unsigned long index)
{
	unsigned long i;

	if (!entry)
		return;

	if (radix_tree_is_internal_node(entry)) {
		struct radix_tree_node *node = entry_to_node(entry);

		pr_debug("ida node: %p offset %d indices %lu-%lu parent %p free %lx shift %d count %d\n",
			node, node->offset, index * IDA_BITMAP_BITS,
			((index | node_maxindex(node)) + 1) *
				IDA_BITMAP_BITS - 1,
			node->parent, node->tags[0][0], node->shift,
			node->count);
		for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
			dump_ida_node(node->slots[i],
					index | (i << node->shift));
	} else if (xa_is_value(entry)) {
		pr_debug("ida excp: %p offset %d indices %lu-%lu data %lx\n",
				entry, (int)(index & RADIX_TREE_MAP_MASK),
				index * IDA_BITMAP_BITS,
				index * IDA_BITMAP_BITS + BITS_PER_XA_VALUE,
				xa_to_value(entry));
	} else {
		struct ida_bitmap *bitmap = entry;

		pr_debug("ida btmp: %p offset %d indices %lu-%lu data", bitmap,
				(int)(index & RADIX_TREE_MAP_MASK),
				index * IDA_BITMAP_BITS,
				(index + 1) * IDA_BITMAP_BITS - 1);
		for (i = 0; i < IDA_BITMAP_LONGS; i++)
			pr_cont(" %lx", bitmap->bitmap[i]);
		pr_cont("\n");
	}
}

static void ida_dump(struct ida *ida)
{
	struct radix_tree_root *root = &ida->ida_rt;
	pr_debug("ida: %p node %p free %d\n", ida, root->rnode,
				root->gfp_mask >> ROOT_TAG_SHIFT);
	dump_ida_node(root->rnode, 0);
}
#endif
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(gfp_t gfp_mask, struct radix_tree_node *parent,
			struct radix_tree_root *root,
			unsigned int shift, unsigned int offset,
			unsigned int count, unsigned int exceptional)
{
	struct radix_tree_node *ret = NULL;

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted to the memory
		 * cgroup.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->parent;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
out:
	BUG_ON(radix_tree_is_internal_node(ret));
	if (ret) {
		ret->shift = shift;
		ret->offset = offset;
		ret->count = count;
		ret->exceptional = exceptional;
		ret->parent = parent;
		ret->root = root;
	}
	return ret;
}
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);

	/*
	 * Must only free zeroed nodes into the slab.  We can be left with
	 * non-NULL entries by radix_tree_free_nodes, so clear the entries
	 * and tags here.
	 */
	memset(node->slots, 0, sizeof(node->slots));
	memset(node->tags, 0, sizeof(node->tags));
	INIT_LIST_HEAD(&node->private_list);

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static __must_check int __radix_tree_preload(gfp_t gfp_mask, unsigned nr)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	/*
	 * Nodes preloaded by one cgroup can be used by another cgroup, so
	 * they should never be accounted to any particular memory cgroup.
	 */
	gfp_mask &= ~__GFP_ACCOUNT;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < nr) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < nr) {
			node->parent = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
}
EXPORT_SYMBOL(radix_tree_preload);
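/*
 * Typical usage is a sketch like the following, where the tree lock and
 * error handling belong to the caller:
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_tree_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_tree_lock);
 *	radix_tree_preload_end();
 *
 * radix_tree_preload_end() re-enables preemption, so the critical section
 * between the two calls must not sleep.
 */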
/*
 * The same as the function above, except we don't guarantee that preloading
 * happens.  We do it if we decide it helps.  On success, return zero with
 * preemption disabled.  On error, return -ENOMEM with preemption not
 * disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/*
 * Preload with enough objects to ensure that we can split a single entry
 * of order @old_order into many entries of size @new_order
 */
int radix_tree_split_preload(unsigned int old_order, unsigned int new_order,
				gfp_t gfp_mask)
{
	unsigned top = 1 << (old_order % RADIX_TREE_MAP_SHIFT);
	unsigned layers = (old_order / RADIX_TREE_MAP_SHIFT) -
				(new_order / RADIX_TREE_MAP_SHIFT);
	unsigned nr = 0;

	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	BUG_ON(new_order >= old_order);

	while (layers--)
		nr = nr * RADIX_TREE_MAP_SIZE + 1;
	return __radix_tree_preload(gfp_mask, top * nr);
}
#endif
/*
 * The same as the function above, but preload the number of nodes required
 * to insert (1 << order) continuous naturally-aligned elements.
 */
int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
{
	unsigned long nr_subtrees;
	int nr_nodes, subtree_height;

	/* Preloading doesn't help anything with this gfp mask, skip it */
	if (!gfpflags_allow_blocking(gfp_mask)) {
		preempt_disable();
		return 0;
	}

	/*
	 * Calculate number and height of fully populated subtrees it takes to
	 * store (1 << order) elements.
	 */
	nr_subtrees = 1 << order;
	for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
			subtree_height++)
		nr_subtrees >>= RADIX_TREE_MAP_SHIFT;

	/*
	 * The worst case is zero height tree with a single item at index 0 and
	 * then inserting items starting at ULONG_MAX - (1 << order).
	 *
	 * This requires RADIX_TREE_MAX_PATH nodes to build branch from root to
	 * 0-index item.
	 */
	nr_nodes = RADIX_TREE_MAX_PATH;

	/* Plus branch to fully populated subtrees. */
	nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;

	/* Root node is shared. */
	nr_nodes--;

	/* Plus nodes required to build subtrees. */
	nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];

	return __radix_tree_preload(gfp_mask, nr_nodes);
}
static unsigned radix_tree_load_root(const struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_internal_node(node))) {
		node = entry_to_node(node);
		*maxindex = node_maxindex(node);
		return node->shift + RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}
/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, gfp_t gfp,
				unsigned long index, unsigned int shift)
{
	void *entry;
	unsigned int maxshift;
	int tag;

	/* Figure out what the shift should be.  */
	maxshift = shift;
	while (index > shift_maxindex(maxshift))
		maxshift += RADIX_TREE_MAP_SHIFT;

	entry = rcu_dereference_raw(root->rnode);
	if (!entry && (!is_idr(root) || root_tag_get(root, IDR_FREE)))
		goto out;

	do {
		struct radix_tree_node *node = radix_tree_node_alloc(gfp, NULL,
							root, shift, 0, 1, 0);
		if (!node)
			return -ENOMEM;

		if (is_idr(root)) {
			all_tag_set(node, IDR_FREE);
			if (!root_tag_get(root, IDR_FREE)) {
				tag_clear(node, IDR_FREE, 0);
				root_tag_set(root, IDR_FREE);
			}
		} else {
			/* Propagate the aggregated tag info to the new child */
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
				if (root_tag_get(root, tag))
					tag_set(node, tag, 0);
			}
		}

		BUG_ON(shift > BITS_PER_LONG);
		if (radix_tree_is_internal_node(entry)) {
			entry_to_node(entry)->parent = node;
		} else if (xa_is_value(entry)) {
			/* Moving an exceptional root->rnode to a node */
			node->exceptional = 1;
		}
		/*
		 * entry was already in the radix tree, so we do not need
		 * rcu_assign_pointer here
		 */
		node->slots[0] = (void __rcu *)entry;
		entry = node_to_entry(node);
		rcu_assign_pointer(root->rnode, entry);
		shift += RADIX_TREE_MAP_SHIFT;
	} while (shift <= maxshift);
out:
	return maxshift + RADIX_TREE_MAP_SHIFT;
}
/**
 *	radix_tree_shrink - shrink radix tree to minimum height
 *	@root:		radix tree root
 *	@update_node:	callback for changing leaf nodes
 */
static inline bool radix_tree_shrink(struct radix_tree_root *root,
				     radix_tree_update_node_t update_node)
{
	bool shrunk = false;

	for (;;) {
		struct radix_tree_node *node = rcu_dereference_raw(root->rnode);
		struct radix_tree_node *child;

		if (!radix_tree_is_internal_node(node))
			break;
		node = entry_to_node(node);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, or the child is a multiorder
		 * entry, we cannot shrink.
		 */
		if (node->count != 1)
			break;
		child = rcu_dereference_raw(node->slots[0]);
		if (!child)
			break;
		if (!radix_tree_is_internal_node(child) && node->shift)
			break;

		/*
		 * For an IDR, we must not shrink entry 0 into the root in
		 * case somebody calls idr_replace() with a pointer that
		 * appears to be an internal entry
		 */
		if (!node->shift && is_idr(root))
			break;

		if (radix_tree_is_internal_node(child))
			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		root->rnode = (void __rcu *)child;
		if (is_idr(root) && !tag_get(node, IDR_FREE, 0))
			root_tag_clear(root, IDR_FREE);

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		node->count = 0;
		if (!radix_tree_is_internal_node(child)) {
			node->slots[0] = (void __rcu *)RADIX_TREE_RETRY;
			if (update_node)
				update_node(node);
		}

		WARN_ON_ONCE(!list_empty(&node->private_list));
		radix_tree_node_free(node);
		shrunk = true;
	}

	return shrunk;
}
static bool delete_node(struct radix_tree_root *root,
			struct radix_tree_node *node,
			radix_tree_update_node_t update_node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node_to_entry(node) ==
					rcu_dereference_raw(root->rnode))
				deleted |= radix_tree_shrink(root,
								update_node);
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			parent->slots[node->offset] = NULL;
			parent->count--;
		} else {
			/*
			 * Shouldn't the tags already have all been cleared
			 * by the caller?
			 */
			if (!is_idr(root))
				root_tag_clear_all(root);
			root->rnode = NULL;
		}

		WARN_ON_ONCE(!list_empty(&node->private_list));
		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void __rcu ***slotp)
{
	struct radix_tree_node *node = NULL, *child;
	void __rcu **slot = (void __rcu **)&root->rnode;
	unsigned long maxindex;
	unsigned int shift, offset = 0;
	unsigned long max = index | ((1UL << order) - 1);
	gfp_t gfp = root_gfp_mask(root);

	shift = radix_tree_load_root(root, &child, &maxindex);

	/* Make sure the tree is high enough.  */
	if (order > 0 && max == ((1UL << order) - 1))
		max++;
	if (max > maxindex) {
		int error = radix_tree_extend(root, gfp, max, shift);
		if (error < 0)
			return error;
		shift = error;
		child = rcu_dereference_raw(root->rnode);
	}

	while (shift > order) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node.  */
			child = radix_tree_node_alloc(gfp, node, root, shift,
							offset, 0, 0);
			if (!child)
				return -ENOMEM;
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		/* Go a level down */
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);
		slot = &node->slots[offset];
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = slot;
	return 0;
}
/*
 * Free any nodes below this node.  The tree is presumed to not need
 * shrinking, and any user data in the tree is presumed to not need a
 * destructor called on it.  If we need to add a destructor, we can
 * add that functionality later.  Note that we may not clear tags or
 * slots from the tree as an RCU walker may still have a pointer into
 * this subtree.  We could replace the entries with RADIX_TREE_RETRY,
 * but we'll still have to clear those in rcu_free.
 */
static void radix_tree_free_nodes(struct radix_tree_node *node)
{
	unsigned offset = 0;
	struct radix_tree_node *child = entry_to_node(node);

	for (;;) {
		void *entry = rcu_dereference_raw(child->slots[offset]);
		if (radix_tree_is_internal_node(entry) && child->shift &&
				!is_sibling_entry(child, entry)) {
			child = entry_to_node(entry);
			offset = 0;
			continue;
		}
		offset++;
		while (offset == RADIX_TREE_MAP_SIZE) {
			struct radix_tree_node *old = child;
			offset = child->offset + 1;
			child = child->parent;
			WARN_ON_ONCE(!list_empty(&old->private_list));
			radix_tree_node_free(old);
			if (old == entry_to_node(node))
				return;
		}
	}
}
#ifdef CONFIG_RADIX_TREE_MULTIORDER
static inline int insert_entries(struct radix_tree_node *node,
		void __rcu **slot, void *item, unsigned order, bool replace)
{
	struct radix_tree_node *child;
	unsigned i, n, tag, offset, tags = 0;

	if (node) {
		if (order > node->shift)
			n = 1 << (order - node->shift);
		else
			n = 1;
		offset = get_slot_offset(node, slot);
	} else {
		n = 1;
		offset = 0;
	}

	if (n > 1) {
		offset = offset & ~(n - 1);
		slot = &node->slots[offset];
	}
	child = node_to_entry(slot);

	for (i = 0; i < n; i++) {
		if (slot[i]) {
			if (replace) {
				node->count--;
				for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
					if (tag_get(node, tag, offset + i))
						tags |= 1 << tag;
			} else
				return -EEXIST;
		}
	}

	for (i = 0; i < n; i++) {
		struct radix_tree_node *old = rcu_dereference_raw(slot[i]);
		if (i) {
			rcu_assign_pointer(slot[i], child);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_clear(node, tag, offset + i);
		} else {
			rcu_assign_pointer(slot[i], item);
			for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
				if (tags & (1 << tag))
					tag_set(node, tag, offset);
		}
		if (radix_tree_is_internal_node(old) &&
					!is_sibling_entry(node, old) &&
					(old != RADIX_TREE_RETRY))
			radix_tree_free_nodes(old);
		if (xa_is_value(old))
			node->exceptional--;
	}
	if (node) {
		node->count += n;
		if (xa_is_value(item))
			node->exceptional += n;
	}
	return n;
}
#else
static inline int insert_entries(struct radix_tree_node *node,
		void __rcu **slot, void *item, unsigned order, bool replace)
{
	if (*slot)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);
	if (node) {
		node->count++;
		if (xa_is_value(item))
			node->exceptional++;
	}
	return 1;
}
#endif
/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void __rcu **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;

	error = insert_entries(node, slot, item, order, false);
	if (error < 0)
		return error;

	if (node) {
		unsigned offset = get_slot_offset(node, slot);
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
		BUG_ON(tag_get(node, 2, offset));
	} else {
		BUG_ON(root_tags_get(root));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(const struct radix_tree_root *root,
			  unsigned long index, struct radix_tree_node **nodep,
			  void __rcu ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	void __rcu **slot;

 restart:
	parent = NULL;
	slot = (void __rcu **)&root->rnode;
	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		if (node == RADIX_TREE_RETRY)
			goto restart;
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		slot = parent->slots + offset;
		if (parent->shift == 0)
			break;
	}

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void __rcu **radix_tree_lookup_slot(const struct radix_tree_root *root,
				unsigned long index)
{
	void __rcu **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(const struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
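/*
 * A lockless read side might look like this sketch ("my_tree" and the
 * lifetime of the returned item are the caller's concern):
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		use(item);	(must tolerate concurrent deletion)
 *	rcu_read_unlock();
 */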
static inline void replace_sibling_entries(struct radix_tree_node *node,
				void __rcu **slot, int count, int exceptional)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	void *ptr = node_to_entry(slot);
	unsigned offset = get_slot_offset(node, slot) + 1;

	while (offset < RADIX_TREE_MAP_SIZE) {
		if (rcu_dereference_raw(node->slots[offset]) != ptr)
			break;
		if (count < 0) {
			node->slots[offset] = NULL;
			node->count--;
		}
		node->exceptional += exceptional;
		offset++;
	}
#endif
}
static void replace_slot(void __rcu **slot, void *item,
		struct radix_tree_node *node, int count, int exceptional)
{
	if (node && (count || exceptional)) {
		node->count += count;
		node->exceptional += exceptional;
		replace_sibling_entries(node, slot, count, exceptional);
	}

	rcu_assign_pointer(*slot, item);
}
static bool node_tag_get(const struct radix_tree_root *root,
				const struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	if (node)
		return tag_get(node, tag, offset);
	return root_tag_get(root, tag);
}
/*
 * IDR users want to be able to store NULL in the tree, so if the slot isn't
 * free, don't adjust the count, even if it's transitioning between NULL and
 * non-NULL.  For the IDA, we mark slots as being IDR_FREE while they still
 * have empty bits, but it only stores NULL in slots when they're being
 * deleted.
 */
static int calculate_count(struct radix_tree_root *root,
				struct radix_tree_node *node, void __rcu **slot,
				void *item, void *old)
{
	if (is_idr(root)) {
		unsigned offset = get_slot_offset(node, slot);
		bool free = node_tag_get(root, node, IDR_FREE, offset);
		if (!free)
			return 0;
		if (!old)
			return 1;
	}
	return !!item - !!old;
}
/**
 * __radix_tree_replace		- replace item in a slot
 * @root:		radix tree root
 * @node:		pointer to tree node
 * @slot:		pointer to slot in @node
 * @item:		new item to store in the slot.
 * @update_node:	callback for changing leaf nodes
 *
 * For use with __radix_tree_lookup().  Caller must hold tree write locked
 * across slot lookup and replacement.
 */
void __radix_tree_replace(struct radix_tree_root *root,
			  struct radix_tree_node *node,
			  void __rcu **slot, void *item,
			  radix_tree_update_node_t update_node)
{
	void *old = rcu_dereference_raw(*slot);
	int exceptional = !!xa_is_value(item) - !!xa_is_value(old);
	int count = calculate_count(root, node, slot, item, old);

	/*
	 * This function supports replacing exceptional entries and
	 * deleting entries, but that needs accounting against the
	 * node unless the slot is root->rnode.
	 */
	WARN_ON_ONCE(!node && (slot != (void __rcu **)&root->rnode) &&
			(count || exceptional));
	replace_slot(slot, item, node, count, exceptional);

	if (!node)
		return;

	if (update_node)
		update_node(node);

	delete_node(root, node, update_node);
}
/**
 * radix_tree_replace_slot	- replace item in a slot
 * @root:	radix tree root
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_lookup_slot(), radix_tree_gang_lookup_slot(),
 * radix_tree_gang_lookup_tag_slot().  Caller must hold tree write locked
 * across slot lookup and replacement.
 *
 * NOTE: This cannot be used to switch between non-entries (empty slots),
 * regular entries, and exceptional entries, as that requires accounting
 * inside the radix tree node. When switching from one type of entry or
 * deleting, use __radix_tree_lookup() and __radix_tree_replace() or
 * radix_tree_iter_replace().
 */
void radix_tree_replace_slot(struct radix_tree_root *root,
			     void __rcu **slot, void *item)
{
	__radix_tree_replace(root, NULL, slot, item, NULL);
}
EXPORT_SYMBOL(radix_tree_replace_slot);
/**
 * radix_tree_iter_replace - replace item in a slot
 * @root:	radix tree root
 * @iter:	iterator state
 * @slot:	pointer to slot
 * @item:	new item to store in the slot.
 *
 * For use with radix_tree_split() and radix_tree_for_each_slot().
 * Caller must hold tree write locked across split and replacement.
 */
void radix_tree_iter_replace(struct radix_tree_root *root,
				const struct radix_tree_iter *iter,
				void __rcu **slot, void *item)
{
	__radix_tree_replace(root, iter->node, slot, item, NULL);
}
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/**
 * radix_tree_join - replace multiple entries with one multiorder entry
 * @root: radix tree root
 * @index: an index inside the new entry
 * @order: order of the new entry
 * @item: new entry
 *
 * Call this function to replace several entries with one larger entry.
 * The existing entries are presumed to not need freeing as a result of
 * this call.
 *
 * The replacement entry will have all the tags set on it that were set
 * on any of the entries it is replacing.
 */
int radix_tree_join(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void __rcu **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (!error)
		error = insert_entries(node, slot, item, order, true);
	if (error > 0)
		error = 0;

	return error;
}
/**
 * radix_tree_split - Split an entry into smaller entries
 * @root: radix tree root
 * @index: An index within the large entry
 * @order: Order of new entries
 *
 * Call this function as the first step in replacing a multiorder entry
 * with several entries of lower order.  After this function returns,
 * loop over the relevant portion of the tree using radix_tree_for_each_slot()
 * and call radix_tree_iter_replace() to set up each new entry.
 *
 * The tags from this entry are replicated to all the new entries.
 *
 * The radix tree should be locked against modification during the entire
 * replacement operation.  Lock-free lookups will see RADIX_TREE_RETRY which
 * should prompt RCU walkers to restart the lookup from the root.
 */
int radix_tree_split(struct radix_tree_root *root, unsigned long index,
				unsigned order)
{
	struct radix_tree_node *parent, *node, *child;
	void __rcu **slot;
	unsigned int offset, end;
	unsigned n, tag, tags = 0;
	gfp_t gfp = root_gfp_mask(root);

	if (!__radix_tree_lookup(root, index, &parent, &slot))
		return -ENOENT;
	if (!parent)
		return -ENOENT;

	offset = get_slot_offset(parent, slot);

	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		if (tag_get(parent, tag, offset))
			tags |= 1 << tag;

	for (end = offset + 1; end < RADIX_TREE_MAP_SIZE; end++) {
		if (!is_sibling_entry(parent,
				rcu_dereference_raw(parent->slots[end])))
			break;
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			if (tags & (1 << tag))
				tag_set(parent, tag, end);
		/* rcu_assign_pointer ensures tags are set before RETRY */
		rcu_assign_pointer(parent->slots[end], RADIX_TREE_RETRY);
	}
	rcu_assign_pointer(parent->slots[offset], RADIX_TREE_RETRY);
	parent->exceptional -= (end - offset);

	if (order == parent->shift)
		return 0;
	if (order > parent->shift) {
		while (offset < end)
			offset += insert_entries(parent, &parent->slots[offset],
					RADIX_TREE_RETRY, order, true);
		return 0;
	}

	node = parent;

	for (;;) {
		if (node->shift > order) {
			child = radix_tree_node_alloc(gfp, node, root,
					node->shift - RADIX_TREE_MAP_SHIFT,
					offset, 0, 0);
			if (!child)
				goto nomem;
			if (node != parent) {
				node->count++;
				rcu_assign_pointer(node->slots[offset],
							node_to_entry(child));
				for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
					if (tags & (1 << tag))
						tag_set(node, tag, offset);
			}

			node = child;
			offset = 0;
			continue;
		}

		n = insert_entries(node, &node->slots[offset],
					RADIX_TREE_RETRY, order, false);
		BUG_ON(n > RADIX_TREE_MAP_SIZE);

		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			if (tags & (1 << tag))
				tag_set(node, tag, offset);
		offset += n;

		while (offset == RADIX_TREE_MAP_SIZE) {
			if (node == parent)
				break;
			offset = node->offset;
			child = node;
			node = node->parent;
			rcu_assign_pointer(node->slots[offset],
						node_to_entry(child));
			offset++;
		}
		if ((node == parent) && (offset == end))
			return 0;
	}

 nomem:
	/* Shouldn't happen; did user forget to preload? */
	/* TODO: free all the allocated nodes */
	return -ENOMEM;
}
#endif
static void node_tag_set(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (tag_get(node, tag, offset))
			return;
		tag_set(node, tag, offset);
		offset = node->offset;
		node = node->parent;
	}

	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);
}
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	radix_tree_load_root(root, &node, &maxindex);
	BUG_ON(index > maxindex);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		BUG_ON(!node);

		if (!tag_get(parent, tag, offset))
			tag_set(parent, tag, offset);
	}

	/* set the root's tag bit */
	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_set);
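/*
 * Tags mark a subset of present entries so they can be found again quickly.
 * A sketch, with MY_TAG standing in for a caller-chosen tag index smaller
 * than RADIX_TREE_MAX_TAGS:
 *
 *	radix_tree_tag_set(&my_tree, index, MY_TAG);
 *	...
 *	nr = radix_tree_gang_lookup_tag(&my_tree, results, 0, 16, MY_TAG);
 */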
/**
 * radix_tree_iter_tag_set - set a tag on the current iterator entry
 * @root:	radix tree root
 * @iter:	iterator state
 * @tag:	tag to set
 */
void radix_tree_iter_tag_set(struct radix_tree_root *root,
			const struct radix_tree_iter *iter, unsigned int tag)
{
	node_tag_set(root, iter->node, tag, iter_offset(iter));
}
static void node_tag_clear(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (!tag_get(node, tag, offset))
			return;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			return;

		offset = node->offset;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);
}
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If this causes
 *	the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	int uninitialized_var(offset);

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	parent = NULL;

	while (radix_tree_is_internal_node(node)) {
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
	}

	if (node)
		node_tag_clear(root, parent, tag, offset);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
/**
 * radix_tree_iter_tag_clear - clear a tag on the current iterator entry
 * @root:	radix tree root
 * @iter:	iterator state
 * @tag:	tag to clear
 */
void radix_tree_iter_tag_clear(struct radix_tree_root *root,
			const struct radix_tree_iter *iter, unsigned int tag)
{
	node_tag_clear(root, iter->node, tag, iter_offset(iter));
}
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(const struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	if (!root_tag_get(root, tag))
		return 0;

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return 0;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);

		if (!tag_get(parent, tag, offset))
			return 0;
		if (node == RADIX_TREE_RETRY)
			break;
	}

	return 1;
}
EXPORT_SYMBOL(radix_tree_tag_get);
static inline void __set_iter_shift(struct radix_tree_iter *iter,
					unsigned int shift)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	iter->shift = shift;
#endif
}
/* Construct iter->tags bit-mask from node->tags[tag] array */
static void set_iter_tags(struct radix_tree_iter *iter,
				struct radix_tree_node *node, unsigned offset,
				unsigned tag)
{
	unsigned tag_long = offset / BITS_PER_LONG;
	unsigned tag_bit  = offset % BITS_PER_LONG;

	if (!node) {
		iter->tags = 1;
		return;
	}

	iter->tags = node->tags[tag][tag_long] >> tag_bit;

	/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
	if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
		/* Pick tags from next element */
		if (tag_bit)
			iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
		/* Clip chunk size, here only BITS_PER_LONG tags */
		iter->next_index = __radix_tree_iter_add(iter, BITS_PER_LONG);
	}
}
#ifdef CONFIG_RADIX_TREE_MULTIORDER
static void __rcu **skip_siblings(struct radix_tree_node **nodep,
			void __rcu **slot, struct radix_tree_iter *iter)
{
	while (iter->index < iter->next_index) {
		*nodep = rcu_dereference_raw(*slot);
		if (*nodep && !is_sibling_entry(iter->node, *nodep))
			return slot;
		slot++;
		iter->index = __radix_tree_iter_add(iter, 1);
	}
	*nodep = NULL;
	return NULL;
}
void __rcu **__radix_tree_next_slot(void __rcu **slot,
				struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node;

	slot = skip_siblings(&node, slot, iter);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;
		unsigned long next_index;

		if (node == RADIX_TREE_RETRY)
			return slot;
		node = entry_to_node(node);
		iter->node = node;
		iter->shift = node->shift;

		if (flags & RADIX_TREE_ITER_TAGGED) {
			offset = radix_tree_find_next_bit(node, tag, 0);
			if (offset == RADIX_TREE_MAP_SIZE)
				return NULL;
			slot = &node->slots[offset];
			iter->index = __radix_tree_iter_add(iter, offset);
			set_iter_tags(iter, node, offset, tag);
			node = rcu_dereference_raw(*slot);
		} else {
			offset = 0;
			slot = &node->slots[0];
			for (;;) {
				node = rcu_dereference_raw(*slot);
				if (node)
					break;
				slot++;
				offset++;
				if (offset == RADIX_TREE_MAP_SIZE)
					return NULL;
			}
			iter->index = __radix_tree_iter_add(iter, offset);
		}
		if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0))
			goto none;
		next_index = (iter->index | shift_maxindex(iter->shift)) + 1;
		if (next_index < iter->next_index)
			iter->next_index = next_index;
	}

	return slot;
 none:
	iter->next_index = 0;
	return NULL;
}
EXPORT_SYMBOL(__radix_tree_next_slot);
#else
static void __rcu **skip_siblings(struct radix_tree_node **nodep,
			void __rcu **slot, struct radix_tree_iter *iter)
{
	return slot;
}
#endif /* CONFIG_RADIX_TREE_MULTIORDER */

void __rcu **radix_tree_iter_resume(void __rcu **slot,
					struct radix_tree_iter *iter)
{
	struct radix_tree_node *node;

	slot++;
	iter->index = __radix_tree_iter_add(iter, 1);
	skip_siblings(&node, slot, iter);
	iter->next_index = iter->index;
	iter->tags = 0;
	return NULL;
}
EXPORT_SYMBOL(radix_tree_iter_resume);
/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void __rcu **radix_tree_next_chunk(const struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node, *child;
	unsigned long index, offset, maxindex;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

 restart:
	radix_tree_load_root(root, &child, &maxindex);
	if (index > maxindex)
		return NULL;
	if (!child)
		return NULL;

	if (!radix_tree_is_internal_node(child)) {
		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		iter->node = NULL;
		__set_iter_shift(iter, 0);
		return (void __rcu **)&root->rnode;
	}

	do {
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);

		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!tag_get(node, tag, offset) : !child) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(node, tag,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					void *slot = rcu_dereference_raw(
							node->slots[offset]);
					if (is_sibling_entry(node, slot))
						continue;
					if (slot)
						break;
				}
			index &= ~node_maxindex(node);
			index += offset << node->shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
			child = rcu_dereference_raw(node->slots[offset]);
		}

		if (!child)
			goto restart;
		if (child == RADIX_TREE_RETRY)
			break;
	} while (node->shift && radix_tree_is_internal_node(child));

	/* Update the iterator state */
	iter->index = (index &~ node_maxindex(node)) | (offset << node->shift);
	iter->next_index = (index | node_maxindex(node)) + 1;
	iter->node = node;
	__set_iter_shift(iter, node->shift);

	if (flags & RADIX_TREE_ITER_TAGGED)
		set_iter_tags(iter, node, offset, tag);

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued in individual locks, and results
 *	stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(const struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
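/*
 * For example, copying out up to 16 item pointers starting at index 0
 * (sketch; "my_tree" is the caller's):
 *
 *	void *batch[16];
 *	unsigned int n;
 *
 *	rcu_read_lock();
 *	n = radix_tree_gang_lookup(&my_tree, batch, 0, ARRAY_SIZE(batch));
 *	rcu_read_unlock();
 */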
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(const struct radix_tree_root *root,
			void __rcu ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(const struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(const struct radix_tree_root *root,
		void __rcu ***results, unsigned long first_index,
		unsigned int max_items, unsigned int tag)
{
	struct radix_tree_iter iter;
	void __rcu **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *	@update_node:	callback for changing leaf nodes
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 */
void __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node,
			      radix_tree_update_node_t update_node)
{
	delete_node(root, node, update_node);
}
static bool __radix_tree_delete(struct radix_tree_root *root,
				struct radix_tree_node *node, void __rcu **slot)
{
	void *old = rcu_dereference_raw(*slot);
	int exceptional = xa_is_value(old) ? -1 : 0;
	unsigned offset = get_slot_offset(node, slot);
	int tag;

	if (is_idr(root))
		node_tag_set(root, node, IDR_FREE, offset);
	else
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);

	replace_slot(slot, NULL, node, -1, exceptional);
	return node && delete_node(root, node, NULL);
}
/**
 * radix_tree_iter_delete - delete the entry at this iterator position
 * @root: radix tree root
 * @iter: iterator state
 * @slot: pointer to slot
 *
 * Delete the entry at the position currently pointed to by the iterator.
 * This may result in the current node being freed; if it is, the iterator
 * is advanced so that it will not reference the freed memory.  This
 * function may be called without any locking if there are no other threads
 * which can access this tree.
 */
void radix_tree_iter_delete(struct radix_tree_root *root,
				struct radix_tree_iter *iter, void __rcu **slot)
{
	if (__radix_tree_delete(root, iter->node, slot))
		iter->index = iter->next_index;
}
EXPORT_SYMBOL(radix_tree_iter_delete);
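/*
 * A common pattern is emptying a tree in one pass (sketch; assumes the
 * caller excludes all other modifiers of "my_tree"):
 *
 *	struct radix_tree_iter iter;
 *	void __rcu **slot;
 *
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0)
 *		radix_tree_iter_delete(&my_tree, &iter, slot);
 */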
/**
 * radix_tree_delete_item - delete an item from a radix tree
 * @root: radix tree root
 * @index: index key
 * @item: expected item
 *
 * Remove @item at @index from the radix tree rooted at @root.
 *
 * Return: the deleted entry, or %NULL if it was not present
 * or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node = NULL;
	void __rcu **slot = NULL;
	void *entry;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!slot)
		return NULL;
	if (!entry && (!is_idr(root) || node_tag_get(root, node, IDR_FREE,
						get_slot_offset(node, slot))))
		return NULL;

	if (item && entry != item)
		return NULL;

	__radix_tree_delete(root, node, slot);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);
/**
 * radix_tree_delete - delete an entry from a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Remove the entry at @index from the radix tree rooted at @root.
 *
 * Return: The deleted entry, or %NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
void radix_tree_clear_tags(struct radix_tree_root *root,
			   struct radix_tree_node *node,
			   void __rcu **slot)
{
	if (node) {
		unsigned int tag, offset = get_slot_offset(node, slot);
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);
	} else {
		root_tag_clear_all(root);
	}
}
/**
 * radix_tree_tagged - test whether any items in the tree are tagged
 * @root:	radix tree root
 * @tag:	tag to test
 */
int radix_tree_tagged(const struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);
/**
 * idr_preload - preload for idr_alloc()
 * @gfp_mask: allocation mask to use for preloading
 *
 * Preallocate memory to use for the next call to idr_alloc().  This function
 * returns with preemption disabled.  It will be enabled by idr_preload_end().
 */
void idr_preload(gfp_t gfp_mask)
{
	if (__radix_tree_preload(gfp_mask, IDR_PRELOAD_SIZE))
		preempt_disable();
}
EXPORT_SYMBOL(idr_preload);
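/*
 * Typical usage, paired with idr_preload_end() (sketch; "my_idr" and the
 * lock are the caller's):
 *
 *	idr_preload(GFP_KERNEL);
 *	spin_lock(&my_lock);
 *	id = idr_alloc(&my_idr, ptr, 0, 0, GFP_NOWAIT);
 *	spin_unlock(&my_lock);
 *	idr_preload_end();
 */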
int ida_pre_get(struct ida *ida, gfp_t gfp)
{
	/*
	 * The IDA API has no preload_end() equivalent.  Instead,
	 * ida_get_new() can return -EAGAIN, prompting the caller
	 * to return to the ida_pre_get() step.
	 */
	if (!__radix_tree_preload(gfp, IDA_PRELOAD_SIZE))
		preempt_enable();

	if (!this_cpu_read(ida_bitmap)) {
		struct ida_bitmap *bitmap = kzalloc(sizeof(*bitmap), gfp);
		if (!bitmap)
			return 0;
		if (this_cpu_cmpxchg(ida_bitmap, NULL, bitmap))
			kfree(bitmap);
	}

	return 1;
}
void __rcu **idr_get_free(struct radix_tree_root *root,
			      struct radix_tree_iter *iter, gfp_t gfp,
			      unsigned long max)
{
	struct radix_tree_node *node = NULL, *child;
	void __rcu **slot = (void __rcu **)&root->rnode;
	unsigned long maxindex, start = iter->next_index;
	unsigned int shift, offset = 0;

 grow:
	shift = radix_tree_load_root(root, &child, &maxindex);
	if (!radix_tree_tagged(root, IDR_FREE))
		start = max(start, maxindex + 1);
	if (start > max)
		return ERR_PTR(-ENOSPC);

	if (start > maxindex) {
		int error = radix_tree_extend(root, gfp, start, shift);
		if (error < 0)
			return ERR_PTR(error);
		shift = error;
		child = rcu_dereference_raw(root->rnode);
	}
	if (start == 0 && shift == 0)
		shift = RADIX_TREE_MAP_SHIFT;

	while (shift) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node.  */
			child = radix_tree_node_alloc(gfp, node, root, shift,
							offset, 0, 0);
			if (!child)
				return ERR_PTR(-ENOMEM);
			all_tag_set(child, IDR_FREE);
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, start);
		if (!tag_get(node, IDR_FREE, offset)) {
			offset = radix_tree_find_next_bit(node, IDR_FREE,
							offset + 1);
			start = next_index(start, node, offset);
			if (start > max)
				return ERR_PTR(-ENOSPC);
			while (offset == RADIX_TREE_MAP_SIZE) {
				offset = node->offset + 1;
				node = node->parent;
				if (!node)
					goto grow;
				shift = node->shift;
			}
			child = rcu_dereference_raw(node->slots[offset]);
		}
		slot = &node->slots[offset];
	}

	iter->index = start;
	if (node)
		iter->next_index = 1 + min(max, (start | node_maxindex(node)));
	else
		iter->next_index = 1;
	iter->node = node;
	__set_iter_shift(iter, shift);
	set_iter_tags(iter, node, offset, IDR_FREE);

	return slot;
}
/**
 * idr_destroy - release all internal memory from an IDR
 * @idr: idr handle
 *
 * After this function is called, the IDR is empty, and may be reused or
 * the data structure containing it may be freed.
 *
 * A typical clean-up sequence for objects stored in an idr tree will use
 * idr_for_each() to free all objects, if necessary, then idr_destroy() to
 * free the memory used to keep track of those objects.
 */
void idr_destroy(struct idr *idr)
{
	struct radix_tree_node *node = rcu_dereference_raw(idr->idr_rt.rnode);
	if (radix_tree_is_internal_node(node))
		radix_tree_free_nodes(node);
	idr->idr_rt.rnode = NULL;
	root_tag_set(&idr->idr_rt, IDR_FREE);
}
EXPORT_SYMBOL(idr_destroy);
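/*
 * The clean-up sequence described above, as a sketch (free_fn is a
 * caller-supplied idr_for_each() callback that frees one object):
 *
 *	idr_for_each(&my_idr, free_fn, NULL);
 *	idr_destroy(&my_idr);
 */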
static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}
static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
static __init void radix_tree_init_maxnodes(void)
{
	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
	unsigned int i, j;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
		for (j = i; j > 0; j--)
			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
	}
}
static int radix_tree_cpu_dead(unsigned int cpu)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	rtp = &per_cpu(radix_tree_preloads, cpu);
	while (rtp->nr) {
		node = rtp->nodes;
		rtp->nodes = node->parent;
		kmem_cache_free(radix_tree_node_cachep, node);
		rtp->nr--;
	}
	kfree(per_cpu(ida_bitmap, cpu));
	per_cpu(ida_bitmap, cpu) = NULL;
	return 0;
}
void __init radix_tree_init(void)
{
	int ret;

	BUILD_BUG_ON(RADIX_TREE_MAX_TAGS + __GFP_BITS_SHIFT > 32);
	BUILD_BUG_ON(ROOT_IS_IDR & ~GFP_ZONEMASK);
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxnodes();
	ret = cpuhp_setup_state_nocalls(CPUHP_RADIX_DEAD, "lib/radix:dead",
					NULL, radix_tree_cpu_dead);
	WARN_ON(ret < 0);
}