Searched refs:ESID_MASK (Results 1 – 9 of 9) sorted by relevance
43 #define ESID_MASK 0xf0000000 macro
31 #define ESID_MASK 0xfffffffff0000000UL macro
229 else if ((svcpu->slb[i].esid & ESID_MASK) == esid) { in kvmppc_mmu_next_segment()
262 u64 slb_esid = (eaddr & ESID_MASK) | SLB_ESID_V; in kvmppc_mmu_map_segment()
269 slb_index = kvmppc_mmu_next_segment(vcpu, eaddr & ESID_MASK); in kvmppc_mmu_map_segment()
123 page = (eaddr & ~ESID_MASK) >> 12; in kvmppc_mmu_get_pteg()
176 va = (vsid << SID_SHIFT) | (eaddr & ~ESID_MASK); in kvmppc_mmu_map_page()
344 slbe->orige = rb & (ESID_MASK | SLB_ESID_V); in kvmppc_mmu_book3s_64_slbmte()
356 mask = ESID_MASK; in kvmppc_mmu_book3s_hv_find_slbe()
86 if ((castout_ste->esid_data & ESID_MASK) != PAGE_OFFSET) in make_ste()
201 ea = ste->esid_data & ESID_MASK; in switch_stab()
41 (((ssize) == MMU_SEGSIZE_256M)? ESID_MASK: ESID_MASK_1T)
176 slb.esid = (ea & ESID_MASK) | SLB_ESID_V; in __spu_trap_data_seg()
268 slb->esid = (ea & ESID_MASK) | SLB_ESID_V; in __spu_kernel_slb()
282 if (!((slbs[i].esid ^ ea) & ESID_MASK)) in __slb_present()