Lines Matching refs:locks
384 struct ww_mutex *locks; member
422 struct ww_mutex *locks = stress->locks; in stress_inorder_work() local
441 err = ww_mutex_lock(&locks[order[n]], &ctx); in stress_inorder_work()
449 ww_mutex_unlock(&locks[order[contended]]); in stress_inorder_work()
452 ww_mutex_unlock(&locks[order[n]]); in stress_inorder_work()
455 ww_mutex_lock_slow(&locks[order[contended]], &ctx); in stress_inorder_work()
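The references above appear to come from the kernel's ww_mutex stress test (stress_inorder_work() and friends). They follow the standard wait/wound acquire pattern: take each lock under one ww_acquire_ctx, and on -EDEADLK release everything held, sleep on the contended lock with ww_mutex_lock_slow(), and retry the whole sequence. A minimal sketch of that loop, assuming a caller-supplied array locks[0..n-1] already initialised against a shared ww_class (the lock_all_inorder() helper name is illustrative, not part of the file):

#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(ww_class);

/*
 * Acquire locks[0..n-1] in index order under @ctx.  On success the caller
 * later unlocks each lock and calls ww_acquire_fini(ctx).
 */
static int lock_all_inorder(struct ww_mutex *locks, int n,
                            struct ww_acquire_ctx *ctx)
{
        int contended = -1;     /* index already held via ww_mutex_lock_slow() */
        int i, err;

        ww_acquire_init(ctx, &ww_class);
retry:
        err = 0;
        for (i = 0; i < n; i++) {
                if (i == contended)     /* taken on the slow path below */
                        continue;

                err = ww_mutex_lock(&locks[i], ctx);
                if (err < 0)
                        break;
        }
        if (!err) {
                ww_acquire_done(ctx);   /* all locks held, no more retries */
                return 0;
        }

        /* Back off: release everything currently held. */
        if (contended > i)
                ww_mutex_unlock(&locks[contended]);
        contended = i;
        while (i--)
                ww_mutex_unlock(&locks[i]);

        if (err == -EDEADLK) {
                /* Sleep on the lock we lost to, then retry the sequence. */
                ww_mutex_lock_slow(&locks[contended], ctx);
                goto retry;
        }

        ww_acquire_fini(ctx);
        return err;
}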
479 LIST_HEAD(locks); in stress_reorder_work()
494 ll->lock = &stress->locks[order[n]]; in stress_reorder_work()
495 list_add(&ll->link, &locks); in stress_reorder_work()
503 list_for_each_entry(ll, &locks, link) { in stress_reorder_work()
509 list_for_each_entry_continue_reverse(ln, &locks, link) in stress_reorder_work()
519 list_move(&ll->link, &locks); /* restarts iteration */ in stress_reorder_work()
523 list_for_each_entry(ll, &locks, link) in stress_reorder_work()
530 list_for_each_entry_safe(ll, ln, &locks, link) in stress_reorder_work()
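stress_reorder_work() plays the same game over a linked list instead of an array: on contention, list_for_each_entry_continue_reverse() walks back to release the locks already taken, the contended lock is acquired with ww_mutex_lock_slow(), and list_move() puts its entry at the head of the list so the outer list_for_each_entry() effectively restarts. A sketch of just that inner loop, using a hypothetical lock_link wrapper in place of the test's own structs; the context is assumed to be ww_acquire_init()ed by the caller:

#include <linux/list.h>
#include <linux/ww_mutex.h>

struct lock_link {              /* hypothetical per-lock list entry */
        struct list_head link;
        struct ww_mutex *lock;
};

/* Acquire every lock on @locks under @ctx. */
static int lock_list(struct list_head *locks, struct ww_acquire_ctx *ctx)
{
        struct lock_link *ll, *ln;
        int err;

        list_for_each_entry(ll, locks, link) {
                err = ww_mutex_lock(ll->lock, ctx);
                if (!err)
                        continue;

                /* Release the locks taken before the contended one. */
                ln = ll;
                list_for_each_entry_continue_reverse(ln, locks, link)
                        ww_mutex_unlock(ln->lock);

                if (err != -EDEADLK)
                        return err;

                /* Wait for the contended lock, then restart from it. */
                ww_mutex_lock_slow(ll->lock, ctx);
                list_move(&ll->link, locks);    /* restarts iteration */
        }

        return 0;
}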
539 struct ww_mutex *lock = stress->locks + get_random_u32_below(nlocks); in stress_one_work()
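stress_one_work() covers the degenerate case: one ww_mutex chosen at random (get_random_u32_below() returns a value in [0, nlocks)), locked with a NULL acquire context, which is allowed when only a single lock of the class is taken at a time. A tiny sketch (touch_one() is an illustrative name):

#include <linux/ww_mutex.h>

/* Lock/unlock a single ww_mutex; no ww_acquire_ctx is needed for one lock. */
static void touch_one(struct ww_mutex *lock)
{
        if (!ww_mutex_lock(lock, NULL)) {
                /* ... critical section ... */
                ww_mutex_unlock(lock);
        }
}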
562 struct ww_mutex *locks; in stress() local
566 locks = kmalloc_array(nlocks, sizeof(*locks), GFP_KERNEL); in stress()
567 if (!locks) in stress()
573 kfree(locks); in stress()
578 ww_mutex_init(&locks[n], &ww_class); in stress()
607 stress->locks = locks; in stress()
618 ww_mutex_destroy(&locks[n]); in stress()
620 kfree(locks); in stress()
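The stress() references trace the array's lifecycle: kmalloc_array() the ww_mutexes, initialise each against the shared ww_class, and on teardown destroy every mutex before freeing the array. A sketch of that setup/teardown pair under the same assumptions (helper names are illustrative):

#include <linux/slab.h>
#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(ww_class);       /* same class as in the sketches above */

static struct ww_mutex *alloc_locks(int nlocks)
{
        struct ww_mutex *locks;
        int n;

        locks = kmalloc_array(nlocks, sizeof(*locks), GFP_KERNEL);
        if (!locks)
                return NULL;

        for (n = 0; n < nlocks; n++)
                ww_mutex_init(&locks[n], &ww_class);

        return locks;
}

static void free_locks(struct ww_mutex *locks, int nlocks)
{
        int n;

        for (n = 0; n < nlocks; n++)
                ww_mutex_destroy(&locks[n]);
        kfree(locks);
}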