Lines Matching refs:size (drm_mm KUnit tests, drivers/gpu/drm/tests/drm_mm_test.c)

100 static bool assert_continuous(struct kunit *test, const struct drm_mm *mm, u64 size)  in assert_continuous()  argument
118 if (node->size != size) { in assert_continuous()
120 n, size, node->size); in assert_continuous()
130 drm_mm_for_each_node_in_range(check, mm, addr, addr + size) { in assert_continuous()
140 KUNIT_FAIL(test, "lookup failed for node %llx + %llx\n", addr, size); in assert_continuous()
144 addr += size; in assert_continuous()
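
The assert_continuous() fragments above walk the allocator with drm_mm_for_each_node_in_range() and compare each node's size against the expected stride. A minimal sketch of that lookup pattern, assuming only the public API from include/drm/drm_mm.h (the helper name is invented for illustration):

#include <drm/drm_mm.h>

/* Check that [addr, addr + size) is backed by exactly one node of that size. */
static bool range_backed_by_one_node(struct drm_mm *mm, u64 addr, u64 size)
{
	struct drm_mm_node *node, *found = NULL;

	drm_mm_for_each_node_in_range(node, mm, addr, addr + size) {
		if (found)
			return false;	/* a second node overlaps the range */
		found = node;
	}

	return found && found->start == addr && found->size == size;
}
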
163 u64 size, u64 alignment, unsigned long color) in assert_node() argument
172 if (node->size != size) { in assert_node()
174 node->size, size); in assert_node()
196 const unsigned int size = 4096; in drm_test_mm_init() local
206 drm_mm_init(&mm, 0, size); in drm_test_mm_init()
218 if (!assert_one_hole(test, &mm, 0, size)) { in drm_test_mm_init()
225 tmp.size = size; in drm_test_mm_init()
239 if (!assert_one_hole(test, &mm, 0, size)) { in drm_test_mm_init()
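
drm_test_mm_init() above initialises a 4096-byte range, reserves a node spanning all of it, and checks that exactly one hole of the full size exists before and after. A hedged sketch of that init/reserve/remove cycle (error handling trimmed, not the test code itself):

#include <drm/drm_mm.h>

static int init_reserve_demo(void)
{
	struct drm_mm mm;
	struct drm_mm_node tmp = {};
	int err;

	drm_mm_init(&mm, 0, 4096);		/* manage [0, 4096) */

	tmp.start = 0;
	tmp.size = 4096;
	err = drm_mm_reserve_node(&mm, &tmp);	/* claim the whole range */
	if (!err)
		drm_mm_remove_node(&tmp);	/* the single hole reappears */

	drm_mm_takedown(&mm);
	return err;
}
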
261 nodes[0].size = 1024; in drm_test_mm_debug()
264 nodes[0].start, nodes[0].size); in drm_test_mm_debug()
266 nodes[1].size = 1024; in drm_test_mm_debug()
267 nodes[1].start = 4096 - 512 - nodes[1].size; in drm_test_mm_debug()
270 nodes[0].start, nodes[0].size); in drm_test_mm_debug()
274 u64 start, u64 size) in set_node() argument
277 node->size = size; in set_node()
291 node->start, node->size); in expect_reserve_fail()
296 err, -ENOSPC, node->start, node->size); in expect_reserve_fail()
303 u64 size) in check_reserve_boundaries() argument
306 u64 start, size; in check_reserve_boundaries() member
311 B(-size, 0), in check_reserve_boundaries()
312 B(size, 0), in check_reserve_boundaries()
313 B(size * count, 0), in check_reserve_boundaries()
314 B(-size, size), in check_reserve_boundaries()
315 B(-size, -size), in check_reserve_boundaries()
316 B(-size, 2 * size), in check_reserve_boundaries()
317 B(0, -size), in check_reserve_boundaries()
318 B(size, -size), in check_reserve_boundaries()
319 B(count * size, size), in check_reserve_boundaries()
320 B(count * size, -size), in check_reserve_boundaries()
321 B(count * size, count * size), in check_reserve_boundaries()
322 B(count * size, -count * size), in check_reserve_boundaries()
323 B(count * size, -(count + 1) * size), in check_reserve_boundaries()
324 B((count + 1) * size, size), in check_reserve_boundaries()
325 B((count + 1) * size, -size), in check_reserve_boundaries()
326 B((count + 1) * size, -2 * size), in check_reserve_boundaries()
334 boundaries[n].size))) { in check_reserve_boundaries()
336 n, boundaries[n].name, count, size); in check_reserve_boundaries()
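
check_reserve_boundaries() builds the B(start, size) table above out of out-of-range and overflowing rectangles and expects every reservation to be rejected. A small sketch of the failure check it delegates to expect_reserve_fail() (helper name illustrative):

#include <linux/errno.h>
#include <drm/drm_mm.h>

/* Reserving outside the managed range (or atop an existing node) must ENOSPC. */
static bool reserve_must_fail(struct drm_mm *mm, u64 start, u64 size)
{
	struct drm_mm_node tmp = {
		.start = start,
		.size = size,
	};

	return drm_mm_reserve_node(mm, &tmp) == -ENOSPC;
}
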
344 static int __drm_test_mm_reserve(struct kunit *test, unsigned int count, u64 size) in __drm_test_mm_reserve() argument
359 DRM_MM_BUG_ON(!size); in __drm_test_mm_reserve()
370 drm_mm_init(&mm, 0, count * size); in __drm_test_mm_reserve()
372 if (!check_reserve_boundaries(test, &mm, count, size)) in __drm_test_mm_reserve()
376 nodes[n].start = order[n] * size; in __drm_test_mm_reserve()
377 nodes[n].size = size; in __drm_test_mm_reserve()
398 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_reserve()
404 if (!expect_reserve_fail(test, &mm, set_node(&tmp, order[n] * size, 1))) in __drm_test_mm_reserve()
418 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_reserve()
423 if (!expect_reserve_fail(test, &mm, set_node(&tmp, 0, size * count))) in __drm_test_mm_reserve()
427 if (!expect_reserve_fail(test, &mm, set_node(&tmp, size * n, size * (count - n)))) in __drm_test_mm_reserve()
451 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_reserve()
472 u64 size = BIT_ULL(n); in drm_test_mm_reserve() local
474 KUNIT_ASSERT_FALSE(test, __drm_test_mm_reserve(test, count, size - 1)); in drm_test_mm_reserve()
475 KUNIT_ASSERT_FALSE(test, __drm_test_mm_reserve(test, count, size)); in drm_test_mm_reserve()
476 KUNIT_ASSERT_FALSE(test, __drm_test_mm_reserve(test, count, size + 1)); in drm_test_mm_reserve()
483 struct drm_mm_node *node, u64 size, u64 alignment, unsigned long color, in expect_insert() argument
489 size, alignment, color, in expect_insert()
494 size, alignment, color, mode->name, err); in expect_insert()
498 if (!assert_node(test, node, mm, size, alignment, color)) { in expect_insert()
506 static bool expect_insert_fail(struct kunit *test, struct drm_mm *mm, u64 size) in expect_insert_fail() argument
511 err = drm_mm_insert_node(mm, &tmp, size); in expect_insert_fail()
517 tmp.start, tmp.size); in expect_insert_fail()
522 err, -ENOSPC, size); in expect_insert_fail()
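
expect_insert_fail() checks that a plain insert into a full drm_mm comes back with -ENOSPC and leaves the scratch node unallocated. A sketch under the same assumption (illustrative helper, not the test's):

#include <linux/errno.h>
#include <drm/drm_mm.h>

static bool insert_must_fail(struct drm_mm *mm, u64 size)
{
	struct drm_mm_node tmp = {};
	int err;

	err = drm_mm_insert_node(mm, &tmp, size);
	if (err == -ENOSPC)
		return true;

	if (!err)
		drm_mm_remove_node(&tmp);	/* unexpected success: clean up */
	return false;
}
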
527 static int __drm_test_mm_insert(struct kunit *test, unsigned int count, u64 size, bool replace) in __drm_test_mm_insert() argument
539 DRM_MM_BUG_ON(!size); in __drm_test_mm_insert()
550 drm_mm_init(&mm, 0, count * size); in __drm_test_mm_insert()
558 if (!expect_insert(test, &mm, node, size, 0, n, mode)) { in __drm_test_mm_insert()
560 mode->name, size, n); in __drm_test_mm_insert()
573 if (!assert_node(test, &nodes[n], &mm, size, 0, n)) { in __drm_test_mm_insert()
576 size, n); in __drm_test_mm_insert()
583 tmp.start, size, nodes[n].start, nodes[n].size); in __drm_test_mm_insert()
590 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_insert()
594 if (!expect_insert_fail(test, &mm, size)) in __drm_test_mm_insert()
602 if (!expect_insert(test, &mm, &nodes[n], size, 0, n, mode)) { in __drm_test_mm_insert()
604 mode->name, size, n); in __drm_test_mm_insert()
615 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_insert()
628 if (!expect_insert(test, &mm, node, size, 0, n, mode)) { in __drm_test_mm_insert()
631 mode->name, size, n); in __drm_test_mm_insert()
638 if (!assert_continuous(test, &mm, size)) in __drm_test_mm_insert()
641 if (!expect_insert_fail(test, &mm, size)) in __drm_test_mm_insert()
669 u64 size = BIT_ULL(n); in drm_test_mm_insert() local
671 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size - 1, false)); in drm_test_mm_insert()
672 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size, false)); in drm_test_mm_insert()
673 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size + 1, false)); in drm_test_mm_insert()
691 u64 size = BIT_ULL(n); in drm_test_mm_replace() local
693 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size - 1, true)); in drm_test_mm_replace()
694 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size, true)); in drm_test_mm_replace()
695 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert(test, count, size + 1, true)); in drm_test_mm_replace()
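
drm_test_mm_replace() reruns the insert loop with replace=true, swapping each freshly inserted node for another one. The swap itself is a single call; a sketch (both nodes caller-owned, the old one previously inserted):

#include <drm/drm_mm.h>

static void replace_demo(struct drm_mm_node *old, struct drm_mm_node *new)
{
	/* new inherits old's start, size and color; old becomes unallocated */
	drm_mm_replace_node(old, new);
}
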
702 u64 size, u64 alignment, unsigned long color, in expect_insert_in_range() argument
708 size, alignment, color, in expect_insert_in_range()
714 size, alignment, color, mode->name, in expect_insert_in_range()
719 if (!assert_node(test, node, mm, size, alignment, color)) { in expect_insert_in_range()
728 u64 size, u64 range_start, u64 range_end) in expect_insert_in_range_fail() argument
733 err = drm_mm_insert_node_in_range(mm, &tmp, size, 0, 0, range_start, range_end, in expect_insert_in_range_fail()
741 tmp.start, tmp.size, range_start, range_end); in expect_insert_in_range_fail()
746 err, -ENOSPC, size, range_start, range_end); in expect_insert_in_range_fail()
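
expect_insert_in_range() and expect_insert_in_range_fail() above wrap the range-restricted allocator entry point. A hedged sketch of that call with the default placement mode (wrapper name invented for the example):

#include <drm/drm_mm.h>

static int insert_in_window(struct drm_mm *mm, struct drm_mm_node *node,
			    u64 size, u64 range_start, u64 range_end)
{
	/* only holes inside [range_start, range_end) are considered */
	return drm_mm_insert_node_in_range(mm, node, size,
					   0,			/* alignment */
					   0,			/* color */
					   range_start, range_end,
					   DRM_MM_INSERT_BEST);
}
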
753 u64 size, u64 start, u64 end) in assert_contiguous_in_range() argument
758 if (!expect_insert_in_range_fail(test, mm, size, start, end)) in assert_contiguous_in_range()
761 n = div64_u64(start + size - 1, size); in assert_contiguous_in_range()
763 if (node->start < start || node->start + node->size > end) { in assert_contiguous_in_range()
766 n, node->start, node->start + node->size, start, end); in assert_contiguous_in_range()
770 if (node->start != n * size) { in assert_contiguous_in_range()
772 n, n * size, node->start); in assert_contiguous_in_range()
776 if (node->size != size) { in assert_contiguous_in_range()
778 n, size, node->size); in assert_contiguous_in_range()
794 node->start, node->size, start); in assert_contiguous_in_range()
803 node->start, node->size, end); in assert_contiguous_in_range()
811 static int __drm_test_mm_insert_range(struct kunit *test, unsigned int count, u64 size, in __drm_test_mm_insert_range() argument
821 DRM_MM_BUG_ON(!size); in __drm_test_mm_insert_range()
833 drm_mm_init(&mm, 0, count * size); in __drm_test_mm_insert_range()
835 start_n = div64_u64(start + size - 1, size); in __drm_test_mm_insert_range()
836 end_n = div64_u64(end - size, size); in __drm_test_mm_insert_range()
840 if (!expect_insert_in_range(test, &mm, &nodes[n], size, size, n, in __drm_test_mm_insert_range()
844 mode->name, size, n, start_n, end_n, start, end); in __drm_test_mm_insert_range()
849 if (!assert_contiguous_in_range(test, &mm, size, start, end)) { in __drm_test_mm_insert_range()
852 mode->name, start, end, size); in __drm_test_mm_insert_range()
861 if (!expect_insert_in_range(test, &mm, &nodes[n], size, size, n, in __drm_test_mm_insert_range()
875 if (!assert_contiguous_in_range(test, &mm, size, start, end)) { in __drm_test_mm_insert_range()
878 mode->name, start, end, size); in __drm_test_mm_insert_range()
903 const unsigned int size = end - start; in insert_outside_range() local
905 drm_mm_init(&mm, start, size); in insert_outside_range()
910 if (!expect_insert_in_range_fail(test, &mm, size, in insert_outside_range()
911 start - size / 2, start + (size + 1) / 2)) in insert_outside_range()
914 if (!expect_insert_in_range_fail(test, &mm, size, in insert_outside_range()
915 end - (size + 1) / 2, end + size / 2)) in insert_outside_range()
918 if (!expect_insert_in_range_fail(test, &mm, 1, end, end + size)) in insert_outside_range()
934 const u64 size = BIT_ULL(n); in drm_test_mm_insert_range() local
935 const u64 max = count * size; in drm_test_mm_insert_range()
937 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, 0, max)); in drm_test_mm_insert_range()
938 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, 1, max)); in drm_test_mm_insert_range()
939 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, 0, max - 1)); in drm_test_mm_insert_range()
940 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, 0, max / 2)); in drm_test_mm_insert_range()
941 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, in drm_test_mm_insert_range()
943 KUNIT_ASSERT_FALSE(test, __drm_test_mm_insert_range(test, count, size, in drm_test_mm_insert_range()
953 unsigned int size = 4096; in prepare_frag() local
957 if (!expect_insert(test, mm, &nodes[i], size, 0, i, mode) != 0) { in prepare_frag()
976 unsigned int size = 8192; in get_insert_time() local
982 if (!expect_insert(test, mm, &nodes[i], size, 0, i, mode) != 0) { in get_insert_time()
1075 u64 size = next_prime_number(prime); in drm_test_mm_align() local
1077 if (!expect_insert(test, &mm, &nodes[i], size, prime, i, mode)) { in drm_test_mm_align()
1111 u64 align, size; in drm_test_mm_align_pot() local
1120 size = BIT_ULL(bit - 1) + 1; in drm_test_mm_align_pot()
1121 if (!expect_insert(test, &mm, node, size, align, bit, &insert_modes[0])) { in drm_test_mm_align_pot()
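
drm_test_mm_align() and drm_test_mm_align_pot() pass prime and power-of-two alignments through expect_insert(); the alignment argument constrains node->start to a multiple of it. A minimal sketch (helper name illustrative):

#include <drm/drm_mm.h>

static int insert_aligned(struct drm_mm *mm, struct drm_mm_node *node,
			  u64 size, u64 alignment)
{
	return drm_mm_insert_node_generic(mm, node, size, alignment,
					  0, DRM_MM_INSERT_BEST);
}
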
1150 scan->hit_start, scan->hit_end, scan->size, scan->alignment, scan->color); in show_scan()
1164 hole->start, hole->size, hole->color); in show_holes()
1168 next->start, next->size, next->color); in show_holes()
1206 scan->size, count, scan->alignment, scan->color); in evict_nodes()
1275 return assert_continuous(test, mm, nodes[0].node.size); in evict_nothing()
1323 return assert_continuous(test, mm, nodes[0].node.size); in evict_everything()
1328 unsigned int *order, unsigned int count, unsigned int size, in evict_something() argument
1337 drm_mm_scan_init_with_range(&scan, mm, size, alignment, 0, range_start, in evict_something()
1343 err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, 0, in evict_something()
1347 size, alignment); in evict_something()
1353 if (tmp.start < range_start || tmp.start + tmp.size > range_end) { in evict_something()
1356 tmp.start, tmp.size, range_start, range_end); in evict_something()
1360 if (!assert_node(test, &tmp, mm, size, alignment, 0) || in evict_something()
1364 tmp.size, size, alignment, misalignment(&tmp, alignment), in evict_something()
1382 if (!assert_continuous(test, mm, nodes[0].node.size)) { in evict_something()
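
evict_something() above drives a full scan/evict cycle: a drm_mm_scan is seeded over a range, candidate nodes are fed in until the scan reports a fit, the victims are removed, and the freed hole is claimed with DRM_MM_INSERT_EVICT. A hedged sketch of that protocol, assuming caller-side bookkeeping (struct victim and the candidate list are invented for the example; the scan calls follow include/drm/drm_mm.h):

#include <linux/errno.h>
#include <linux/limits.h>
#include <linux/list.h>
#include <drm/drm_mm.h>

struct victim {
	struct drm_mm_node node;
	struct list_head link;
};

static int evict_and_insert(struct drm_mm *mm, struct list_head *candidates,
			    struct drm_mm_node *out, u64 size, u64 alignment)
{
	struct drm_mm_scan scan;
	struct victim *v, *vn;
	LIST_HEAD(evict_list);
	bool found = false;

	drm_mm_scan_init_with_range(&scan, mm, size, alignment, 0,
				    0, U64_MAX, DRM_MM_INSERT_BEST);

	/* Feed candidates until the scan finds a hole big enough. list_move()
	 * prepends, so walking evict_list later visits nodes in reverse order
	 * of addition, as drm_mm_scan_remove_block() requires. */
	list_for_each_entry_safe(v, vn, candidates, link) {
		list_move(&v->link, &evict_list);
		if (drm_mm_scan_add_block(&scan, &v->node)) {
			found = true;
			break;
		}
	}

	/* Every added block must be handed back; keep only the real victims. */
	list_for_each_entry_safe(v, vn, &evict_list, link) {
		if (!drm_mm_scan_remove_block(&scan, &v->node))
			list_move(&v->link, candidates);
	}

	if (!found)
		return -ENOSPC;

	/* Evict the victims, then take the hole they leave behind. */
	list_for_each_entry(v, &evict_list, link)
		drm_mm_remove_node(&v->node);

	return drm_mm_insert_node_generic(mm, out, size, alignment, 0,
					  DRM_MM_INSERT_EVICT);
}
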
1393 const unsigned int size = 8192; in drm_test_mm_evict() local
1407 nodes = vzalloc(array_size(size, sizeof(*nodes))); in drm_test_mm_evict()
1410 order = drm_random_order(size, &prng); in drm_test_mm_evict()
1414 drm_mm_init(&mm, 0, size); in drm_test_mm_evict()
1415 for (n = 0; n < size; n++) { in drm_test_mm_evict()
1423 if (!evict_nothing(test, &mm, size, nodes)) { in drm_test_mm_evict()
1427 if (!evict_everything(test, &mm, size, nodes)) { in drm_test_mm_evict()
1433 for (n = 1; n <= size; n <<= 1) { in drm_test_mm_evict()
1434 drm_random_reorder(order, size, &prng); in drm_test_mm_evict()
1435 if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size, n, 1, in drm_test_mm_evict()
1443 for (n = 1; n < size; n <<= 1) { in drm_test_mm_evict()
1444 drm_random_reorder(order, size, &prng); in drm_test_mm_evict()
1445 if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size, in drm_test_mm_evict()
1446 size / 2, n, mode)) { in drm_test_mm_evict()
1449 mode->name, size / 2, n); in drm_test_mm_evict()
1454 for_each_prime_number_from(n, 1, min(size, max_prime)) { in drm_test_mm_evict()
1455 unsigned int nsize = (size - n + 1) / 2; in drm_test_mm_evict()
1459 drm_random_reorder(order, size, &prng); in drm_test_mm_evict()
1460 if (evict_something(test, &mm, 0, U64_MAX, nodes, order, size, in drm_test_mm_evict()
1484 const unsigned int size = 8192; in drm_test_mm_evict_range() local
1485 const unsigned int range_size = size / 2; in drm_test_mm_evict_range()
1486 const unsigned int range_start = size / 4; in drm_test_mm_evict_range()
1498 nodes = vzalloc(array_size(size, sizeof(*nodes))); in drm_test_mm_evict_range()
1501 order = drm_random_order(size, &prng); in drm_test_mm_evict_range()
1505 drm_mm_init(&mm, 0, size); in drm_test_mm_evict_range()
1506 for (n = 0; n < size; n++) { in drm_test_mm_evict_range()
1515 drm_random_reorder(order, size, &prng); in drm_test_mm_evict_range()
1517 order, size, n, 1, mode)) { in drm_test_mm_evict_range()
1526 drm_random_reorder(order, size, &prng); in drm_test_mm_evict_range()
1528 order, size, range_size / 2, n, mode)) { in drm_test_mm_evict_range()
1541 drm_random_reorder(order, size, &prng); in drm_test_mm_evict_range()
1543 order, size, nsize, n, mode)) { in drm_test_mm_evict_range()
1565 return div64_u64(node->start, node->size); in node_index()
1574 unsigned int size; in drm_test_mm_topdown() local
1596 for (size = 1; size <= 64; size <<= 1) { in drm_test_mm_topdown()
1597 drm_mm_init(&mm, 0, size * count); in drm_test_mm_topdown()
1599 if (!expect_insert(test, &mm, &nodes[n], size, 0, n, topdown)) { in drm_test_mm_topdown()
1600 KUNIT_FAIL(test, "insert failed, size %u step %d\n", size, n); in drm_test_mm_topdown()
1607 n, nodes[n].start, size); in drm_test_mm_topdown()
1611 if (!assert_one_hole(test, &mm, 0, size * (count - n - 1))) in drm_test_mm_topdown()
1615 if (!assert_continuous(test, &mm, size)) in drm_test_mm_topdown()
1630 if (!expect_insert(test, &mm, node, size, 0, 0, topdown)) { in drm_test_mm_topdown()
1646 m, n, size, last, node_index(node)); in drm_test_mm_topdown()
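
drm_test_mm_topdown() checks that DRM_MM_INSERT_HIGH hands out the highest suitable hole, so allocations stack downwards from the end of the range (drm_test_mm_bottomup() does the mirror check with DRM_MM_INSERT_LOW). A sketch of the call it exercises:

#include <drm/drm_mm.h>

static int insert_topdown(struct drm_mm *mm, struct drm_mm_node *node, u64 size)
{
	/* DRM_MM_INSERT_LOW would pin the node to the lowest hole instead */
	return drm_mm_insert_node_generic(mm, node, size, 0, 0,
					  DRM_MM_INSERT_HIGH);
}
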
1681 unsigned int size; in drm_test_mm_bottomup() local
1702 for (size = 1; size <= 64; size <<= 1) { in drm_test_mm_bottomup()
1703 drm_mm_init(&mm, 0, size * count); in drm_test_mm_bottomup()
1705 if (!expect_insert(test, &mm, &nodes[n], size, 0, n, bottomup)) { in drm_test_mm_bottomup()
1707 "bottomup insert failed, size %u step %d\n", size, n); in drm_test_mm_bottomup()
1711 if (!assert_one_hole(test, &mm, size * (n + 1), size * count)) in drm_test_mm_bottomup()
1715 if (!assert_continuous(test, &mm, size)) in drm_test_mm_bottomup()
1730 if (!expect_insert(test, &mm, node, size, 0, 0, bottomup)) { in drm_test_mm_bottomup()
1776 rsvd_lo.size = 1; in drm_test_mm_once()
1784 rsvd_hi.size = 1; in drm_test_mm_once()
1836 node->color, node->start, node->size, in colors_abutt()
1839 list_next_entry(node, node_list)->size); in colors_abutt()
1876 if (node->color != node->size) { in drm_test_mm_color()
1878 node->size, node->color); in drm_test_mm_color()
1896 node->size = 1 + 2 * count; in drm_test_mm_color()
1897 node->color = node->size; in drm_test_mm_color()
1904 last = node->start + node->size; in drm_test_mm_color()
1914 node->size = n + count; in drm_test_mm_color()
1915 node->color = node->size; in drm_test_mm_color()
1931 last = node->start + node->size; in drm_test_mm_color()
1949 if (node->color != node->size) { in drm_test_mm_color()
1952 mode->name, node->size, node->color); in drm_test_mm_color()
1960 div64_u64_rem(node->start, node->size, &rem); in drm_test_mm_color()
1964 mode->name, node->start, node->size, rem); in drm_test_mm_color()
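
drm_test_mm_color() gives every node a color equal to its size and relies on the owner-installed color_adjust hook to keep differently colored neighbours one unit apart, which is why the fragments above compare node->start and node->size against the color. A sketch of such a hook, modelled on the behaviour the test checks (the function name is illustrative; the callback signature is struct drm_mm::color_adjust):

#include <linux/list.h>
#include <drm/drm_mm.h>

static void keep_colors_apart(const struct drm_mm_node *node,
			      unsigned long color, u64 *start, u64 *end)
{
	const struct drm_mm_node *next = list_next_entry(node, node_list);

	/* node precedes the hole under consideration, next follows it */
	if (drm_mm_node_allocated(node) && node->color != color)
		*start += 1;

	if (drm_mm_node_allocated(next) && next->color != color)
		*end -= 1;
}

It would be installed after drm_mm_init() with mm.color_adjust = keep_colors_apart;.
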
1985 unsigned int count, unsigned int size, unsigned int alignment, in evict_color() argument
1994 drm_mm_scan_init_with_range(&scan, mm, size, alignment, color, range_start, in evict_color()
2000 err = drm_mm_insert_node_generic(mm, &tmp, size, alignment, color, in evict_color()
2005 size, alignment, color, err); in evict_color()
2011 if (tmp.start < range_start || tmp.start + tmp.size > range_end) { in evict_color()
2014 tmp.start, tmp.size, range_start, range_end); in evict_color()
2021 if (!assert_node(test, &tmp, mm, size, alignment, color)) { in evict_color()
2024 tmp.size, size, alignment, misalignment(&tmp, alignment), tmp.start); in evict_color()