Lines matching refs: P (references to the type parameter P)

49 pub struct SCAllocator<'a, P: AllocablePage> {
57 pub(crate) empty_slabs: PageList<'a, P>,
59 pub(crate) slabs: PageList<'a, P>,
61 pub(crate) full_slabs: PageList<'a, P>,
72 let obj_per_page = cmin((P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD) / $size, 8 * 64);
87 impl<'a, P: AllocablePage> SCAllocator<'a, P> {
92 pub const fn new(size: usize) -> SCAllocator<'a, P> {
97 pub fn new(size: usize) -> SCAllocator<'a, P> {
107 fn insert_partial_slab(&mut self, new_head: &'a mut P) {
112 fn insert_empty(&mut self, new_head: &'a mut P) {
114 new_head as *const P as usize % P::SIZE, in insert_empty()
143 fn move_to_empty(&mut self, page: &'a mut P) {
144 let page_ptr = page as *const P; in move_to_empty()
161 fn move_partial_to_full(&mut self, page: &'a mut P) {
162 let page_ptr = page as *const P; in move_partial_to_full()
175 fn move_full_to_partial(&mut self, page: &'a mut P) {
176 let page_ptr = page as *const P; in move_full_to_partial()
215 if self.allocation_count > SCAllocator::<P>::REBALANCE_COUNT { in try_allocate_from_pagelist()
225 F: FnMut(*mut P), in try_reclaim_pages()
231 dealloc(page as *mut P); in try_reclaim_pages()
245 pub unsafe fn refill(&mut self, page: &'a mut P) {
247 .initialize(self.size, P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD); in refill()
269 assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD)); in allocate()
324 assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD)); in deallocate()
330 P::SIZE in deallocate()
333 let page = (ptr.as_ptr() as usize) & !(P::SIZE - 1); in deallocate()
337 let slab_page = unsafe { mem::transmute::<VAddr, &'a mut P>(page) }; in deallocate()
349 slab_callback.free_slab_page(slab_page as *const P as *mut u8, P::SIZE); in deallocate()
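
The fields at lines 57-61 and the move_* helpers at lines 143-176 describe one invariant: every page owned by the size-class allocator sits on exactly one of three lists (empty, partially full, or full), and allocate/deallocate shuffle pages between them as slots are handed out and returned. The sketch below models that bookkeeping with plain Vecs and hypothetical page IDs; it is only an illustration of the list transitions, not the crate's implementation, which keeps &'a mut P nodes in PageList<'a, P> structures over AllocablePage.

// Hypothetical, simplified model of the empty_slabs / slabs / full_slabs
// bookkeeping; usize page IDs stand in for &'a mut P. Not the crate's API.
struct PageLists {
    empty: Vec<usize>,   // no live objects; candidates for try_reclaim_pages
    partial: Vec<usize>, // at least one free slot; preferred by allocate
    full: Vec<usize>,    // no free slots
}

impl PageLists {
    // Detach a page from whichever list it currently occupies.
    fn detach(list: &mut Vec<usize>, page: usize) {
        if let Some(i) = list.iter().position(|&p| p == page) {
            list.swap_remove(i);
        }
    }

    // Allocation filled the page's last slot (cf. move_partial_to_full, line 161).
    fn move_partial_to_full(&mut self, page: usize) {
        Self::detach(&mut self.partial, page);
        self.full.push(page);
    }

    // A free on a full page opened a slot (cf. move_full_to_partial, line 175).
    fn move_full_to_partial(&mut self, page: usize) {
        Self::detach(&mut self.full, page);
        self.partial.push(page);
    }

    // The page's last live object was freed (cf. move_to_empty, line 143).
    fn move_to_empty(&mut self, page: usize) {
        Self::detach(&mut self.partial, page);
        self.empty.push(page);
    }
}

fn main() {
    let mut lists = PageLists { empty: vec![], partial: vec![0xA], full: vec![] };
    lists.move_partial_to_full(0xA); // page 0xA ran out of slots
    lists.move_full_to_partial(0xA); // one object was freed
    lists.move_to_empty(0xA);        // the last object was freed
    assert_eq!(lists.empty, vec![0xA]);
}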
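
Two calculations in the listing are also worth spelling out. Line 72 caps the per-page object count at 8 * 64, presumably one bit per object across eight 64-bit bitfield words, and line 333 recovers the owning page from an object pointer by masking off the low address bits, which is valid only because slab pages are P::SIZE-aligned. A minimal sketch of both follows; PAGE_SIZE and METADATA_OVERHEAD are placeholder values standing in for P::SIZE and OBJECT_PAGE_METADATA_OVERHEAD, not the crate's actual constants.

const PAGE_SIZE: usize = 4096;        // placeholder for P::SIZE
const METADATA_OVERHEAD: usize = 80;  // placeholder for OBJECT_PAGE_METADATA_OVERHEAD
const BITFIELD_SLOTS: usize = 8 * 64; // assumed: eight u64 words, one bit per object

// Objects of `size` bytes that fit on one page, capped by the bitfield width
// (cf. line 72).
fn obj_per_page(size: usize) -> usize {
    core::cmp::min((PAGE_SIZE - METADATA_OVERHEAD) / size, BITFIELD_SLOTS)
}

// Page base recovered from an object address by clearing the low bits
// (cf. line 333); correct because pages are PAGE_SIZE-aligned.
fn page_base(obj_addr: usize) -> usize {
    obj_addr & !(PAGE_SIZE - 1)
}

fn main() {
    // Small classes hit the bitfield cap; larger ones are limited by the
    // usable bytes left on the page after the metadata overhead.
    assert_eq!(obj_per_page(4), BITFIELD_SLOTS);
    assert_eq!(obj_per_page(512), (PAGE_SIZE - METADATA_OVERHEAD) / 512); // = 7

    let page = 0x7f00_0000_usize; // some PAGE_SIZE-aligned page
    let obj = page + 0x120;       // an object allocated inside that page
    assert_eq!(page_base(obj), page);
}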