Lines matching refs:page (each match gives the source line number, the matching code, and the enclosing function; "local" and "argument" mark matches that are a local variable or a function parameter).

49 struct page *page; in flush_all_zero_pkmaps() local
72 page = pte_page(pkmap_page_table[i]); in flush_all_zero_pkmaps()
75 page->virtual = NULL; in flush_all_zero_pkmaps()
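
The matches at lines 49, 72 and 75 come from the loop that reclaims persistent kernel mappings whose use count has dropped to 1. A minimal sketch of that loop, reconstructed around the matched lines; pkmap_count, LAST_PKMAP, pte_clear() and the cache/TLB flush calls are assumed from the usual pkmap bookkeeping, not taken from the match list:

static void flush_all_zero_pkmaps(void)
{
	int i;

	flush_cache_all();

	for (i = 0; i < LAST_PKMAP; i++) {
		struct page *page;			/* line 49 */

		/* a count of 1 means mapped but no longer in use */
		if (pkmap_count[i] != 1)
			continue;
		pkmap_count[i] = 0;

		/* recover the struct page from the pkmap PTE, then unmap it */
		page = pte_page(pkmap_page_table[i]);	/* line 72 */
		pte_clear(&pkmap_page_table[i]);

		/* the page no longer has a kernel virtual address */
		page->virtual = NULL;			/* line 75 */
	}
	flush_tlb_all();
}
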
80 static inline unsigned long map_new_virtual(struct page *page, int nonblocking) in map_new_virtual() argument
116 if (page->virtual) in map_new_virtual()
117 return (unsigned long) page->virtual; in map_new_virtual()
124 set_pte(&(pkmap_page_table[last_pkmap_nr]), mk_pte(page, kmap_prot)); in map_new_virtual()
127 page->virtual = (void *) vaddr; in map_new_virtual()
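
Lines 80 through 127 belong to the slow path that installs a highmem page into a free pkmap slot. A sketch under the assumption that last_pkmap_nr already points at a free slot and that PKMAP_ADDR() and pkmap_count are the usual pkmap helpers; the slot-search and sleep-on-pkmap_map_wait logic is omitted:

static inline unsigned long map_new_virtual(struct page *page, int nonblocking)
{
	unsigned long vaddr;

	/* after sleeping for a free slot another task may have mapped
	 * this page already, hence the re-check (a nonblocking caller
	 * would bail out instead of sleeping) */
	if (page->virtual)					/* line 116 */
		return (unsigned long) page->virtual;		/* line 117 */

	/* install the page into the chosen pkmap slot */
	vaddr = PKMAP_ADDR(last_pkmap_nr);
	set_pte(&(pkmap_page_table[last_pkmap_nr]), mk_pte(page, kmap_prot));	/* line 124 */

	pkmap_count[last_pkmap_nr] = 1;
	page->virtual = (void *) vaddr;				/* line 127 */

	return vaddr;
}
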
132 void fastcall *kmap_high(struct page *page, int nonblocking) in kmap_high() argument
143 vaddr = (unsigned long) page->virtual; in kmap_high()
145 vaddr = map_new_virtual(page, nonblocking); in kmap_high()
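
kmap_high() (lines 132 to 145) reuses a cached mapping when page->virtual is already set and otherwise falls back to map_new_virtual(). A sketch, assuming kmap_lock protects the pkmap state and PKMAP_NR() maps a virtual address back to its slot index:

void fastcall *kmap_high(struct page *page, int nonblocking)	/* line 132 */
{
	unsigned long vaddr;

	/* page->virtual can only be trusted while kmap_lock is held */
	spin_lock(&kmap_lock);
	vaddr = (unsigned long) page->virtual;			/* line 143 */
	if (!vaddr) {
		vaddr = map_new_virtual(page, nonblocking);	/* line 145 */
		if (!vaddr)		/* nonblocking attempt failed */
			goto out;
	}
	/* one more user of this mapping */
	pkmap_count[PKMAP_NR(vaddr)]++;
out:
	spin_unlock(&kmap_lock);
	return (void *) vaddr;
}
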
157 void fastcall kunmap_high(struct page *page) in kunmap_high() argument
164 vaddr = (unsigned long) page->virtual; in kunmap_high()
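
kunmap_high() (lines 157 and 164) is the reverse: it looks up the slot from page->virtual and drops the reference. A sketch with the same assumed helpers (PKMAP_NR, pkmap_count, pkmap_map_wait); callers normally reach this pair through the kmap()/kunmap() wrappers, which only take the *_high path for highmem pages:

void fastcall kunmap_high(struct page *page)			/* line 157 */
{
	unsigned long vaddr;
	unsigned long nr;

	spin_lock(&kmap_lock);
	vaddr = (unsigned long) page->virtual;			/* line 164 */
	if (!vaddr)
		BUG();
	nr = PKMAP_NR(vaddr);

	/* dropping to a count of 1 leaves the slot mapped but
	 * reclaimable by flush_all_zero_pkmaps(); wake anyone
	 * waiting for a free slot */
	switch (--pkmap_count[nr]) {
	case 0:
		BUG();
	case 1:
		wake_up(&pkmap_map_wait);
	}
	spin_unlock(&kmap_lock);
}
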
218 struct page *p_from; in copy_from_high_bh()
231 struct page *p_to; in copy_to_high_bh_irq()
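
The p_from/p_to locals at lines 218 and 231 belong to the helpers that copy data between a highmem buffer_head and its low-memory bounce buffer. A sketch of the read-side helper, assuming a short-lived atomic kmap is used for the copy; KM_BOUNCE_READ is an assumed slot name, and copy_to_high_bh_irq() is the same copy in the other direction, run from IRQ context:

static inline void copy_from_high_bh(struct buffer_head *to,
				     struct buffer_head *from)
{
	struct page *p_from;				/* line 218 */
	char *vfrom;

	p_from = from->b_page;

	/* map the highmem page just long enough to copy its data into
	 * the low-memory bounce buffer */
	vfrom = kmap_atomic(p_from, KM_BOUNCE_READ);
	memcpy(to->b_data, vfrom + bh_offset(from), to->b_size);
	kunmap_atomic(vfrom, KM_BOUNCE_READ);
}
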
246 struct page *page; in bounce_end_io() local
252 page = bh->b_page; in bounce_end_io()
256 __free_page(page); in bounce_end_io()
262 list_add(&page->list, &emergency_pages); in bounce_end_io()
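
bounce_end_io() (lines 246 to 262) runs when the bounced I/O completes: the original request is finished, and the bounce page is either kept for the emergency pool or handed back to the allocator. A sketch, with POOL_SIZE, nr_emergency_pages and emergency_lock as assumed names for the pool bookkeeping; the matching buffer_head recycling is omitted:

static inline void bounce_end_io(struct buffer_head *bh, int uptodate)
{
	struct page *page;				/* line 246 */
	struct buffer_head *bh_orig = (struct buffer_head *) bh->b_private;
	unsigned long flags;

	/* complete the original highmem request first */
	bh_orig->b_end_io(bh_orig, uptodate);

	page = bh->b_page;				/* line 252 */

	/* keep the bounce page for the emergency pool if it has room,
	 * otherwise give it back to the allocator */
	spin_lock_irqsave(&emergency_lock, flags);
	if (nr_emergency_pages >= POOL_SIZE)
		__free_page(page);			/* line 256 */
	else {
		list_add(&page->list, &emergency_pages);	/* line 262 */
		nr_emergency_pages++;
	}
	spin_unlock_irqrestore(&emergency_lock, flags);
}
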
293 struct page * page = alloc_page(GFP_ATOMIC); in init_emergency_pool() local
294 if (!page) { in init_emergency_pool()
298 list_add(&page->list, &emergency_pages); in init_emergency_pool()
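
init_emergency_pool() (lines 293 to 298) pre-allocates that pool at boot so bounce pages remain available even when the allocator fails under pressure. A sketch, again with POOL_SIZE and nr_emergency_pages assumed; locking around the pool list is omitted:

static __init int init_emergency_pool(void)
{
	while (nr_emergency_pages < POOL_SIZE) {
		struct page *page = alloc_page(GFP_ATOMIC);	/* line 293 */
		if (!page)					/* line 294 */
			break;		/* pool could not be filled completely */
		list_add(&page->list, &emergency_pages);	/* line 298 */
		nr_emergency_pages++;
	}
	return 0;
}
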
333 struct page *alloc_bounce_page (void) in alloc_bounce_page()
336 struct page *page; in alloc_bounce_page() local
338 page = alloc_page(GFP_NOHIGHIO); in alloc_bounce_page()
339 if (page) in alloc_bounce_page()
340 return page; in alloc_bounce_page()
354 page = list_entry(tmp->next, struct page, list); in alloc_bounce_page()
359 if (page) in alloc_bounce_page()
360 return page; in alloc_bounce_page()
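
alloc_bounce_page() (lines 333 to 360) tries a normal low-memory allocation first and only then dips into the emergency pool, retrying until something frees up. A sketch built around the matched lines; the exact retry policy between attempts is an assumption:

struct page *alloc_bounce_page(void)			/* line 333 */
{
	struct list_head *tmp;
	struct page *page;				/* line 336 */

	/* fast path: an ordinary low-memory allocation */
	page = alloc_page(GFP_NOHIGHIO);		/* line 338 */
	if (page)					/* lines 339-340 */
		return page;

repeat_alloc:
	/* slow path: take a page from the emergency pool filled by
	 * init_emergency_pool() */
	page = NULL;
	tmp = &emergency_pages;
	spin_lock_irq(&emergency_lock);
	if (!list_empty(tmp)) {
		page = list_entry(tmp->next, struct page, list);	/* line 354 */
		list_del(tmp->next);
		nr_emergency_pages--;
	}
	spin_unlock_irq(&emergency_lock);
	if (page)					/* lines 359-360 */
		return page;

	/* nothing available yet: give pending I/O and the VM a chance,
	 * then try the pool again */
	schedule();
	goto repeat_alloc;
}
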
407 struct page *page; in create_bounce() local
420 page = alloc_bounce_page(); in create_bounce()
422 set_bh_page(bh, page, 0); in create_bounce()
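
create_bounce() (lines 407 to 422) is where the bounce page gets wired into a replacement buffer_head for a highmem request. A sketch of the core of it; the create_bounce() signature, alloc_bounce_bh(), the split into bounce_end_io_write/read completion handlers, and the set of b_* fields copied over are all assumptions:

struct buffer_head *create_bounce(int rw, struct buffer_head *bh_orig)
{
	struct buffer_head *bh;
	struct page *page;				/* line 407 */

	/* data already below the highmem boundary: no bounce needed */
	if (!PageHighMem(bh_orig->b_page))
		return bh_orig;

	bh = alloc_bounce_bh();				/* assumed helper */
	page = alloc_bounce_page();			/* line 420 */
	set_bh_page(bh, page, 0);			/* line 422 */

	/* mirror the fields the driver will look at (abbreviated here) */
	bh->b_size = bh_orig->b_size;
	bh->b_private = (void *) bh_orig;	/* lets bounce_end_io() find the original */
	if (rw == WRITE) {
		bh->b_end_io = bounce_end_io_write;	/* assumed wrapper around bounce_end_io() */
		copy_from_high_bh(bh, bh_orig);		/* push the data down before the write */
	} else
		bh->b_end_io = bounce_end_io_read;

	return bh;
}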