radix tree test suite: Convert iteration test to XArray
With no code left in the kernel using the multiorder radix tree, convert
the iteration test from the radix tree to the XArray. It's unlikely to
suffer the same bug as the radix tree, but this test will prevent that
bug from ever creeping into the XArray implementation.

Signed-off-by: Matthew Wilcox <willy@infradead.org>
Parent: 372266ba02
Commit: 47e0fab2b1
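Before the diff itself, here is a minimal kernel-context sketch (not code from this commit) contrasting the two tagged-iteration idioms the conversion swaps: the radix tree's slot-based walk, where NULL and retry entries are handled by hand, and the XArray's xa_state cursor, where xas_retry() covers the dead-node case. OLD_TAG and NEW_MARK are placeholder names and both functions are hypothetical.

#include <linux/radix-tree.h>
#include <linux/xarray.h>

#define OLD_TAG         0               /* placeholder radix tree tag */
#define NEW_MARK        XA_MARK_0       /* placeholder XArray mark */

/* Radix tree: slot-based walk; NULL and retry entries handled by hand. */
static void walk_tagged_radix(struct radix_tree_root *root)
{
        struct radix_tree_iter iter;
        void **slot;

        rcu_read_lock();
        radix_tree_for_each_tagged(slot, root, &iter, 0, OLD_TAG) {
                void *entry = radix_tree_deref_slot(slot);

                if (unlikely(!entry))
                        continue;
                if (radix_tree_deref_retry(entry)) {
                        slot = radix_tree_iter_retry(&iter);
                        continue;
                }
                /* ... use entry ... */
        }
        rcu_read_unlock();
}

/* XArray: xa_state cursor; xas_retry() hides the dead-node case. */
static void walk_marked_xarray(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);
        void *entry;

        rcu_read_lock();
        xas_for_each_marked(&xas, entry, ULONG_MAX, NEW_MARK) {
                if (xas_retry(&xas, entry))
                        continue;
                /* ... use entry ... */
        }
        rcu_read_unlock();
}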
@@ -1,5 +1,5 @@
 /*
- * iteration_check.c: test races having to do with radix tree iteration
+ * iteration_check.c: test races having to do with xarray iteration
  * Copyright (c) 2016 Intel Corporation
  * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
  *
@@ -12,7 +12,6 @@
  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
  * more details.
  */
-#include <linux/radix-tree.h>
 #include <pthread.h>
 #include "test.h"
 
@@ -23,29 +22,44 @@
 
 static pthread_t threads[NUM_THREADS];
 static unsigned int seeds[3];
-static RADIX_TREE(tree, GFP_KERNEL);
+static DEFINE_XARRAY(array);
 static bool test_complete;
 static int max_order;
 
-/* relentlessly fill the tree with tagged entries */
+void my_item_insert(struct xarray *xa, unsigned long index)
+{
+        XA_STATE(xas, xa, index);
+        struct item *item = item_create(index, 0);
+        int order;
+
+retry:
+        xas_lock(&xas);
+        for (order = max_order; order >= 0; order--) {
+                xas_set_order(&xas, index, order);
+                item->order = order;
+                if (xas_find_conflict(&xas))
+                        continue;
+                xas_store(&xas, item);
+                xas_set_mark(&xas, TAG);
+                break;
+        }
+        xas_unlock(&xas);
+        if (xas_nomem(&xas, GFP_KERNEL))
+                goto retry;
+        if (order < 0)
+                free(item);
+}
+
+/* relentlessly fill the array with tagged entries */
 static void *add_entries_fn(void *arg)
 {
         rcu_register_thread();
 
         while (!test_complete) {
                 unsigned long pgoff;
-                int order;
 
                 for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
-                        xa_lock(&tree);
-                        for (order = max_order; order >= 0; order--) {
-                                if (item_insert_order(&tree, pgoff, order)
-                                                == 0) {
-                                        item_tag_set(&tree, pgoff, TAG);
-                                        break;
-                                }
-                        }
-                        xa_unlock(&tree);
+                        my_item_insert(&array, pgoff);
                 }
         }
 
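The new my_item_insert() above uses the XArray's allocate-outside-the-lock idiom: attempt the store under xas_lock(), and if xas_nomem() reports that a node allocation was needed, it allocates with the given gfp flags and asks the caller to retry. A minimal sketch of the same idiom follows; store_highest_order() is a hypothetical helper, XA_MARK_0 stands in for the test's TAG, and xas_set_order() assumes CONFIG_XARRAY_MULTI.

#include <linux/xarray.h>

/* Hypothetical helper: store entry at the largest free order <= max_order. */
static int store_highest_order(struct xarray *xa, unsigned long index,
                               void *entry, int max_order)
{
        XA_STATE(xas, xa, index);
        int order;

retry:
        xas_lock(&xas);
        for (order = max_order; order >= 0; order--) {
                xas_set_order(&xas, index, order);      /* cover 2^order indices */
                if (xas_find_conflict(&xas))
                        continue;       /* something already stored in this range */
                xas_store(&xas, entry);
                xas_set_mark(&xas, XA_MARK_0);
                break;
        }
        xas_unlock(&xas);
        if (xas_nomem(&xas, GFP_KERNEL))        /* allocate outside the lock */
                goto retry;

        return order;   /* negative if nothing could be stored */
}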
@@ -55,33 +69,25 @@ static void *add_entries_fn(void *arg)
 }
 
 /*
- * Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
- * things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over tagged entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *tagged_iteration_fn(void *arg)
 {
-        struct radix_tree_iter iter;
-        void **slot;
+        XA_STATE(xas, &array, 0);
+        void *entry;
 
         rcu_register_thread();
 
         while (!test_complete) {
+                xas_set(&xas, 0);
                 rcu_read_lock();
-                radix_tree_for_each_tagged(slot, &tree, &iter, 0, TAG) {
-                        void *entry = radix_tree_deref_slot(slot);
-                        if (unlikely(!entry))
+                xas_for_each_marked(&xas, entry, ULONG_MAX, TAG) {
+                        if (xas_retry(&xas, entry))
                                 continue;
 
-                        if (radix_tree_deref_retry(entry)) {
-                                slot = radix_tree_iter_retry(&iter);
-                                continue;
-                        }
-
                         if (rand_r(&seeds[0]) % 50 == 0) {
-                                slot = radix_tree_iter_resume(slot, &iter);
+                                xas_pause(&xas);
                                 rcu_read_unlock();
                                 rcu_barrier();
                                 rcu_read_lock();
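Both iteration threads drop the RCU read lock from time to time in the middle of a walk. xas_pause() takes the place of radix_tree_iter_resume() here: it forgets the cached node so the next loop iteration re-walks from the current index rather than touching memory that may have been freed. Below is a sketch of the same pattern in a more conventional setting (yielding the CPU instead of the test's random pauses); walk_with_breaks() is a hypothetical name.

#include <linux/sched.h>
#include <linux/xarray.h>

/* Hypothetical walker that never holds the RCU read lock across a resched. */
static void walk_with_breaks(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);
        void *entry;

        rcu_read_lock();
        xas_for_each(&xas, entry, ULONG_MAX) {
                if (xas_retry(&xas, entry))
                        continue;
                /* ... process entry ... */
                if (need_resched()) {
                        xas_pause(&xas);        /* drop cached node state */
                        rcu_read_unlock();
                        cond_resched();
                        rcu_read_lock();
                }
        }
        rcu_read_unlock();
}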
@@ -96,33 +102,25 @@ static void *tagged_iteration_fn(void *arg)
 }
 
 /*
- * Iterate over the entries, doing a radix_tree_iter_retry() as we find things
- * that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over the entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *untagged_iteration_fn(void *arg)
 {
-        struct radix_tree_iter iter;
-        void **slot;
+        XA_STATE(xas, &array, 0);
+        void *entry;
 
         rcu_register_thread();
 
         while (!test_complete) {
+                xas_set(&xas, 0);
                 rcu_read_lock();
-                radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-                        void *entry = radix_tree_deref_slot(slot);
-                        if (unlikely(!entry))
+                xas_for_each(&xas, entry, ULONG_MAX) {
+                        if (xas_retry(&xas, entry))
                                 continue;
 
-                        if (radix_tree_deref_retry(entry)) {
-                                slot = radix_tree_iter_retry(&iter);
-                                continue;
-                        }
-
                         if (rand_r(&seeds[1]) % 50 == 0) {
-                                slot = radix_tree_iter_resume(slot, &iter);
+                                xas_pause(&xas);
                                 rcu_read_unlock();
                                 rcu_barrier();
                                 rcu_read_lock();
@@ -137,7 +135,7 @@ static void *untagged_iteration_fn(void *arg)
 }
 
 /*
- * Randomly remove entries to help induce radix_tree_iter_retry() calls in the
+ * Randomly remove entries to help induce retries in the
  * two iteration functions.
  */
 static void *remove_entries_fn(void *arg)
@@ -146,12 +144,13 @@ static void *remove_entries_fn(void *arg)
 
         while (!test_complete) {
                 int pgoff;
+                struct item *item;
 
                 pgoff = rand_r(&seeds[2]) % MAX_IDX;
 
-                xa_lock(&tree);
-                item_delete(&tree, pgoff);
-                xa_unlock(&tree);
+                item = xa_erase(&array, pgoff);
+                if (item)
+                        item_free(item, pgoff);
         }
 
         rcu_unregister_thread();
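In remove_entries_fn() the xa_lock()/item_delete()/xa_unlock() sequence collapses into a single call, because the plain (non-xas) XArray API takes the array's internal spinlock itself; xa_erase() removes the entry at the index and returns it, leaving the caller to free it. A minimal sketch of that contract (delete_and_free() is a hypothetical helper, and the stored entry is assumed to have been kmalloc()ed):

#include <linux/slab.h>
#include <linux/xarray.h>

/* Hypothetical helper: erase an index and free whatever was stored there. */
static bool delete_and_free(struct xarray *xa, unsigned long index)
{
        /* xa_erase() is xa_lock() + __xa_erase() + xa_unlock() */
        void *entry = xa_erase(xa, index);

        if (!entry)
                return false;
        kfree(entry);
        return true;
}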
@@ -164,7 +163,7 @@ static void *tag_entries_fn(void *arg)
         rcu_register_thread();
 
         while (!test_complete) {
-                tag_tagged_items(&tree, 0, MAX_IDX, 10, TAG, NEW_TAG);
+                tag_tagged_items(&array, 0, MAX_IDX, 10, TAG, NEW_TAG);
         }
         rcu_unregister_thread();
         return NULL;
@@ -215,5 +214,5 @@ void iteration_test(unsigned order, unsigned test_duration)
                 }
         }
 
-        item_kill_tree(&tree);
+        item_kill_tree(&array);
 }
@@ -63,16 +63,21 @@ void item_sanity(struct item *item, unsigned long index)
         assert((item->index | mask) == (index | mask));
 }
 
+void item_free(struct item *item, unsigned long index)
+{
+        item_sanity(item, index);
+        free(item);
+}
+
 int item_delete(struct radix_tree_root *root, unsigned long index)
 {
         struct item *item = radix_tree_delete(root, index);
 
-        if (item) {
-                item_sanity(item, index);
-                free(item);
-                return 1;
-        }
-        return 0;
+        if (!item)
+                return 0;
+
+        item_free(item, index);
+        return 1;
 }
 
 static void item_free_rcu(struct rcu_head *head)
@@ -14,6 +14,7 @@ struct item *item_create(unsigned long index, unsigned int order);
 int __item_insert(struct radix_tree_root *root, struct item *item);
 int item_insert(struct radix_tree_root *root, unsigned long index);
 void item_sanity(struct item *item, unsigned long index);
+void item_free(struct item *item, unsigned long index);
 int item_insert_order(struct radix_tree_root *root, unsigned long index,
                         unsigned order);
 int item_delete(struct radix_tree_root *root, unsigned long index);
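A note on the last two hunks: item_delete() used to remove an entry and free it in one step, which ties freeing to the radix tree API. Splitting the sanity-check-and-free into item_free(), and declaring it alongside the other helpers, lets the XArray side pair it with xa_erase(), which hands the removed entry back to the caller, as remove_entries_fn() now does.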