Lines Matching refs:kmalloc_cache_group

4 struct slab kmalloc_cache_group[16] = …  variable
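Before the individual references, a minimal sketch of the two structures they touch may help. Every field name below appears somewhere in this listing; the field types, their ordering, the placeholder list/lock definitions, the concrete 16 size classes and the PAGE_2M_SIZE value are assumptions, not the kernel's real declarations.

typedef unsigned long ul;

/* Placeholder list and lock types; the kernel's real definitions differ. */
struct list_head { struct list_head *prev, *next; };
typedef struct { volatile ul locked; } spinlock_t;
struct page;                                   /* opaque page descriptor */

#define PAGE_2M_SIZE (2UL * 1024 * 1024)       /* one 2 MB window per pool (assumed value) */

/* One pool of equally sized objects, backed by one 2 MB page. */
struct slab_obj {
    struct list_head list;    /* links sibling pools of the same size class */
    struct page *page;        /* backing physical page (line 421) */
    void *vaddr;              /* kernel virtual address of that page (line 423) */
    ul count_using;           /* objects currently handed out */
    ul count_free;            /* objects still available */
    ul *bmp;                  /* allocation bitmap, one bit per object */
    ul bmp_len;               /* bitmap length in bytes */
    ul bmp_count;             /* number of valid bits == objects per pool */
};

/* One kmalloc size class. */
struct slab {
    ul size;                  /* object size served by this class */
    ul count_total_using;     /* in-use objects across all pools */
    ul count_total_free;      /* free objects across all pools */
    struct slab_obj *cache_pool_entry;  /* head of the pool list */
    spinlock_t lock;          /* guards the whole size class */
};

/* 16 size classes; the concrete sizes (e.g. 32 B doubling up to 1 MB) are an
 * assumption, the listing only shows that there are 16 entries. */
struct slab kmalloc_cache_group[16];

/* Placeholder for the global bookkeeping object used by slab_init(); only the
 * one field referenced in this listing is shown. */
struct {
    ul end_of_struct;         /* first free byte after the mm metadata */
} memory_management_struct;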
354 spin_init(&kmalloc_cache_group[i].lock); in slab_init()
356 kmalloc_cache_group[i].cache_pool_entry = (struct slab_obj *)memory_management_struct.end_of_struct; in slab_init()
360 list_init(&kmalloc_cache_group[i].cache_pool_entry->list); in slab_init()
363 kmalloc_cache_group[i].cache_pool_entry->count_using = 0; in slab_init()
364 kmalloc_cache_group[i].cache_pool_entry->count_free = PAGE_2M_SIZE / kmalloc_cache_group[i].size; in slab_init()
365 kmalloc_cache_group[i].cache_pool_entry->bmp_len = (((kmalloc_cache_group[i].cache_pool_entry->cou… in slab_init()
366 kmalloc_cache_group[i].cache_pool_entry->bmp_count = kmalloc_cache_group[i].cache_pool_entry->coun… in slab_init()
369 kmalloc_cache_group[i].cache_pool_entry->bmp = (ul *)memory_management_struct.end_of_struct; in slab_init()
372 memory_management_struct.end_of_struct = (ul)(memory_management_struct.end_of_struct + kmalloc_cache_group[i].cache_po… in slab_init()
375 …memset(kmalloc_cache_group[i].cache_pool_entry->bmp, 0xff, kmalloc_cache_group[i].cache_pool_entry… in slab_init()
376 for (int j = 0; j < kmalloc_cache_group[i].cache_pool_entry->bmp_count; ++j) in slab_init()
377 *(kmalloc_cache_group[i].cache_pool_entry->bmp + (j >> 6)) ^= 1UL << (j % 64); in slab_init()
379 kmalloc_cache_group[i].count_total_using = 0; in slab_init()
380 kmalloc_cache_group[i].count_total_free = kmalloc_cache_group[i].cache_pool_entry->count_free; in slab_init()
421 kmalloc_cache_group[i].cache_pool_entry->page = page; in slab_init()
423 kmalloc_cache_group[i].cache_pool_entry->vaddr = virt; in slab_init()
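Taken together, lines 354-423 describe how slab_init() bootstraps each size class: the first slab_obj and its bitmap are carved out of memory_management_struct.end_of_struct, every valid bitmap bit is flipped back to free, and the pool is later bound to a 2 MB page and its virtual address. Below is a hedged reconstruction of that loop built on the placeholder structures above; slab_init_sketch() is an illustrative name, and the end_of_struct advancement and the bitmap-length rounding are assumptions where the listing is truncated.

#include <string.h>   /* memset(); the kernel provides its own version */

/* Minimal stand-ins for the kernel's lock/list init primitives, so the sketch
 * stays self-contained. */
static void spin_init(spinlock_t *l) { l->locked = 0; }
static void list_init(struct list_head *l) { l->prev = l->next = l; }

void slab_init_sketch(void)
{
    for (int i = 0; i < 16; ++i) {
        spin_init(&kmalloc_cache_group[i].lock);                        /* line 354 */

        /* The first pool descriptor of each class is carved out of the memory
         * right after the mm bookkeeping structures (line 356). */
        kmalloc_cache_group[i].cache_pool_entry =
            (struct slab_obj *)memory_management_struct.end_of_struct;
        memory_management_struct.end_of_struct += sizeof(struct slab_obj); /* rounding assumed */

        list_init(&kmalloc_cache_group[i].cache_pool_entry->list);      /* line 360 */
        kmalloc_cache_group[i].cache_pool_entry->count_using = 0;
        kmalloc_cache_group[i].cache_pool_entry->count_free =
            PAGE_2M_SIZE / kmalloc_cache_group[i].size;                  /* line 364 */
        kmalloc_cache_group[i].cache_pool_entry->bmp_count =
            kmalloc_cache_group[i].cache_pool_entry->count_free;
        /* Bitmap length in bytes, rounded up to whole 64-bit words; the exact
         * rounding on line 365 is cut off, so this formula is an assumption. */
        kmalloc_cache_group[i].cache_pool_entry->bmp_len =
            ((kmalloc_cache_group[i].cache_pool_entry->bmp_count + 63) >> 6) << 3;

        /* The bitmap itself also comes straight from end_of_struct (lines 369/372). */
        kmalloc_cache_group[i].cache_pool_entry->bmp =
            (ul *)memory_management_struct.end_of_struct;
        memory_management_struct.end_of_struct +=
            kmalloc_cache_group[i].cache_pool_entry->bmp_len;

        /* Mark every bit busy, then flip only the valid bits back to free, so
         * the padding bits at the tail stay permanently "used" (lines 375-377). */
        memset(kmalloc_cache_group[i].cache_pool_entry->bmp, 0xff,
               kmalloc_cache_group[i].cache_pool_entry->bmp_len);
        for (int j = 0; j < kmalloc_cache_group[i].cache_pool_entry->bmp_count; ++j)
            *(kmalloc_cache_group[i].cache_pool_entry->bmp + (j >> 6)) ^= 1UL << (j % 64);

        kmalloc_cache_group[i].count_total_using = 0;                    /* line 379 */
        kmalloc_cache_group[i].count_total_free =
            kmalloc_cache_group[i].cache_pool_entry->count_free;         /* line 380 */
    }
    /* Later, each pool is bound to its backing 2 MB page and mapping:
     *   kmalloc_cache_group[i].cache_pool_entry->page  = page;   (line 421)
     *   kmalloc_cache_group[i].cache_pool_entry->vaddr = virt;   (line 423)  */
}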
551 if (kmalloc_cache_group[i].size >= size) in kmalloc()
558 spin_lock(&kmalloc_cache_group[index].lock); in kmalloc()
560 struct slab_obj *slab_obj_ptr = kmalloc_cache_group[index].cache_pool_entry; in kmalloc()
563 if (unlikely(kmalloc_cache_group[index].count_total_free == 0)) in kmalloc()
566 slab_obj_ptr = kmalloc_create_slab_obj(kmalloc_cache_group[index].size); in kmalloc()
575 kmalloc_cache_group[index].count_total_free += slab_obj_ptr->count_free; in kmalloc()
576 list_add(&kmalloc_cache_group[index].cache_pool_entry->list, &slab_obj_ptr->list); in kmalloc()
587 } while (slab_obj_ptr != kmalloc_cache_group[index].cache_pool_entry); in kmalloc()
608 --kmalloc_cache_group[index].count_total_free; in kmalloc()
609 ++kmalloc_cache_group[index].count_total_using; in kmalloc()
611 spin_unlock(&kmalloc_cache_group[index].lock); in kmalloc()
613 result = (void *)((char *)slab_obj_ptr->vaddr + kmalloc_cache_group[index].size * i); in kmalloc()
623 spin_unlock(&kmalloc_cache_group[index].lock); in kmalloc()
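Lines 551-623 outline the allocation path: choose the smallest size class whose size covers the request, lock it, grow the pool ring via kmalloc_create_slab_obj() when count_total_free is zero, then walk the ring, claim a free bitmap bit, and return vaddr + size * i. The sketch below follows that shape; the bit-scan loop, the container_of() traversal and the error handling are assumptions rather than text from the listing.

#include <stddef.h>

/* Kernel primitives assumed by this sketch (prototypes/macros only;
 * kmalloc_create_slab_obj() is the grow path referenced on line 566). */
void spin_lock(spinlock_t *l);
void spin_unlock(spinlock_t *l);
void list_add(struct list_head *head, struct list_head *node);
struct slab_obj *kmalloc_create_slab_obj(ul size);
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

void *kmalloc_sketch(ul size)
{
    int index = -1;

    /* Smallest size class that can hold the request (line 551). */
    for (int i = 0; i < 16; ++i)
        if (kmalloc_cache_group[i].size >= size) { index = i; break; }
    if (index < 0)
        return NULL;                      /* larger than the biggest class */

    spin_lock(&kmalloc_cache_group[index].lock);                      /* line 558 */
    struct slab_obj *slab_obj_ptr = kmalloc_cache_group[index].cache_pool_entry;

    /* No free object in the whole class: grow it by one pool (lines 563-576). */
    if (kmalloc_cache_group[index].count_total_free == 0) {
        slab_obj_ptr = kmalloc_create_slab_obj(kmalloc_cache_group[index].size);
        if (slab_obj_ptr == NULL) {
            spin_unlock(&kmalloc_cache_group[index].lock);
            return NULL;
        }
        kmalloc_cache_group[index].count_total_free += slab_obj_ptr->count_free;
        list_add(&kmalloc_cache_group[index].cache_pool_entry->list, &slab_obj_ptr->list);
    }

    /* Walk the pool ring until a pool with a free slot turns up (line 587). */
    do {
        if (slab_obj_ptr->count_free != 0) {
            for (ul i = 0; i < slab_obj_ptr->bmp_count; ++i) {
                if ((*(slab_obj_ptr->bmp + (i >> 6)) & (1UL << (i % 64))) == 0) {
                    /* Claim the bit and rebalance the counters (lines 608/609). */
                    *(slab_obj_ptr->bmp + (i >> 6)) |= 1UL << (i % 64);
                    ++slab_obj_ptr->count_using;
                    --slab_obj_ptr->count_free;
                    --kmalloc_cache_group[index].count_total_free;
                    ++kmalloc_cache_group[index].count_total_using;
                    spin_unlock(&kmalloc_cache_group[index].lock);
                    /* Object address = pool base + slot index * object size (line 613). */
                    return (void *)((char *)slab_obj_ptr->vaddr +
                                    kmalloc_cache_group[index].size * i);
                }
            }
        }
        slab_obj_ptr = container_of(slab_obj_ptr->list.next, struct slab_obj, list);
    } while (slab_obj_ptr != kmalloc_cache_group[index].cache_pool_entry);

    spin_unlock(&kmalloc_cache_group[index].lock);    /* nothing found (line 623) */
    return NULL;
}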
647 slab_obj_ptr = kmalloc_cache_group[i].cache_pool_entry; in kfree()
659 spin_lock(&kmalloc_cache_group[i].lock); in kfree()
661 index = (address - slab_obj_ptr->vaddr) / kmalloc_cache_group[i].size; in kfree()
668 ++kmalloc_cache_group[i].count_total_free; in kfree()
669 --kmalloc_cache_group[i].count_total_using; in kfree()
673 …(slab_obj_ptr->count_using == 0) && (kmalloc_cache_group[i].count_total_free >= ((slab_obj_ptr->bmp_count) <<… in kfree()
675 switch (kmalloc_cache_group[i].size) in kfree()
685 kmalloc_cache_group[i].count_total_free -= slab_obj_ptr->bmp_count; in kfree()
693 kmalloc_cache_group[i].count_total_free -= slab_obj_ptr->bmp_count; in kfree()
705 spin_unlock(&kmalloc_cache_group[i].lock); in kfree()
709 } while (slab_obj_ptr != kmalloc_cache_group[i].cache_pool_entry); in kfree()
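Lines 647-709 show the reverse path in kfree(): for every size class, walk its pool ring until the pool whose 2 MB window contains the pointer is found, clear that object's bitmap bit, rebalance the counters, and release a completely idle pool once the class keeps enough spare free objects (the threshold on line 673 is truncated; bmp_count << 1 is assumed below). The switch on the class size (lines 675-693), which decides how the pool's metadata and page are actually torn down, is collapsed into a comment. This sketch reuses the declarations from the kmalloc sketch above; the containment test and the list_del() unlink are assumptions.

/* Additional assumed primitive; everything else reuses the declarations above. */
void list_del(struct list_head *node);

void kfree_sketch(void *address)
{
    if (address == NULL)
        return;

    for (int i = 0; i < 16; ++i) {
        struct slab_obj *slab_obj_ptr = kmalloc_cache_group[i].cache_pool_entry;  /* line 647 */

        do {
            /* Containment test for this pool's 2 MB window (assumed form). */
            if ((char *)address >= (char *)slab_obj_ptr->vaddr &&
                (char *)address <  (char *)slab_obj_ptr->vaddr + PAGE_2M_SIZE) {

                spin_lock(&kmalloc_cache_group[i].lock);               /* line 659 */

                /* Slot index inside the pool (line 661). */
                ul index = (ul)((char *)address - (char *)slab_obj_ptr->vaddr) /
                           kmalloc_cache_group[i].size;

                /* Clear the bitmap bit and rebalance the counters (lines 668/669). */
                *(slab_obj_ptr->bmp + (index >> 6)) ^= 1UL << (index % 64);
                ++slab_obj_ptr->count_free;
                --slab_obj_ptr->count_using;
                ++kmalloc_cache_group[i].count_total_free;
                --kmalloc_cache_group[i].count_total_using;

                /* Release a fully idle pool once the class still has at least
                 * two pools' worth of free objects (line 673; the truncated
                 * threshold is assumed to be bmp_count << 1) and it is not the
                 * anchor pool. The original branches on the class size (lines
                 * 675-693) to pick the actual teardown path. */
                if (slab_obj_ptr->count_using == 0 &&
                    kmalloc_cache_group[i].count_total_free >= (slab_obj_ptr->bmp_count << 1) &&
                    slab_obj_ptr != kmalloc_cache_group[i].cache_pool_entry) {
                    list_del(&slab_obj_ptr->list);
                    kmalloc_cache_group[i].count_total_free -= slab_obj_ptr->bmp_count;
                    /* ...free the bitmap, the slab_obj and the backing page here... */
                }

                spin_unlock(&kmalloc_cache_group[i].lock);              /* line 705 */
                return;
            }
            slab_obj_ptr = container_of(slab_obj_ptr->list.next, struct slab_obj, list);
        } while (slab_obj_ptr != kmalloc_cache_group[i].cache_pool_entry);  /* line 709 */
    }
    /* Falling through here means the pointer is not owned by any slab pool. */
}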