/DragonOS-0.1.9/kernel/src/arch/riscv64/mm/

init.rs
     11  MMArch,
     74  let _old_page_table = MMArch::table(PageTableKind::Kernel);  in riscv_mm_init()
     81  let mut mapper: crate::mm::page::PageMapper<MMArch, &mut BumpAllocator<MMArch>> =  in riscv_mm_init()
     82  crate::mm::page::PageMapper::<MMArch, _>::create(  in riscv_mm_init()
     93  let empty_entry = PageEntry::<MMArch>::from_usize(0);  in riscv_mm_init()
     94  for i in 0..MMArch::PAGE_ENTRY_NUM {  in riscv_mm_init()
    106  for i in 0..((area.size + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE) {  in riscv_mm_init()
    107  let paddr = area.base.add(i * MMArch::PAGE_SIZE);  in riscv_mm_init()
    108  let vaddr = unsafe { MMArch::phys_2_virt(paddr) }.unwrap();  in riscv_mm_init()
    109  let flags = kernel_page_flags::<MMArch>(vaddr).set_execute(true);  in riscv_mm_init()
    [all …]
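Lines 106-109 above are the early-mapping loop: riscv_mm_init() rounds each physical memory area up to whole pages and maps it page by page through the direct map. A minimal standalone sketch of that arithmetic, with a hypothetical 4 KiB page size and direct-map offset standing in for MMArch::PAGE_SIZE and MMArch::phys_2_virt():

```rust
// Hypothetical stand-ins for MMArch::PAGE_SIZE and the direct-map offset.
const PAGE_SIZE: usize = 4096;
const PHYS_OFFSET: usize = 0xffff_8000_0000_0000;

fn main() {
    // One physical memory area handed over by the bootloader.
    let (base, size) = (0x8020_0000usize, 0x1_2345usize);

    // Same round-up as `(area.size + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE` (line 106).
    let pages = (size + PAGE_SIZE - 1) / PAGE_SIZE;
    assert_eq!(pages, 19); // 0x1_2345 bytes still need 19 whole 4 KiB pages

    for i in 0..pages {
        let paddr = base + i * PAGE_SIZE; // line 107: area.base.add(i * PAGE_SIZE)
        let vaddr = paddr + PHYS_OFFSET;  // line 108: MMArch::phys_2_virt(paddr), assumed direct map
        // riscv_mm_init() would now map vaddr -> paddr with kernel page flags (line 109).
        let _ = vaddr;
    }
}
```

The `+ PAGE_SIZE - 1` before the division is the usual round-up, so a partially used trailing page is still mapped.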
/DragonOS-0.1.9/kernel/src/mm/

no_init.rs
     15  mm::{MMArch, MemoryManagementArch, PhysAddr},
     35  data: [u64; MMArch::PAGE_SIZE],
     57  data: [0; MMArch::PAGE_SIZE],  in new()
     76  assert!(vaddr.check_aligned(MMArch::PAGE_SIZE));  in allocate_page()
     87  let index = offset / MMArch::PAGE_SIZE;  in free_page()
    144  let flags: PageFlags<MMArch> = PageFlags::new().set_write(true);  in pseudo_map_phys()
    153  let flags: PageFlags<MMArch> = PageFlags::new().set_write(false).set_execute(true);  in pseudo_map_phys_ro()
    163  flags: PageFlags<MMArch>,  in pseudo_map_phys_with_flags() argument
    165  assert!(vaddr.check_aligned(MMArch::PAGE_SIZE));  in pseudo_map_phys_with_flags()
    166  assert!(paddr.check_aligned(MMArch::PAGE_SIZE));  in pseudo_map_phys_with_flags()
    [all …]
early_ioremap.rs
      4  arch::MMArch,
     28  const SLOT_CNT: usize = MMArch::FIXMAP_SIZE / MMArch::PAGE_SIZE;
     81  if phys.check_aligned(MMArch::PAGE_SIZE) == false {  in map()
    126  let map_size = slot_count * MMArch::PAGE_SIZE;  in map()
    153  if virt < MMArch::FIXMAP_START_VADDR || virt >= MMArch::FIXMAP_END_VADDR {  in unmap()
    195  MMArch::FIXMAP_START_VADDR + idx * MMArch::PAGE_SIZE  in idx_to_virt()
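early_ioremap carves the fixmap window into page-sized slots (line 28) and turns a slot index back into a virtual address with plain offset arithmetic (line 195). A sketch with made-up fixmap constants; DragonOS's real FIXMAP_* values are per-architecture and are not shown in this index:

```rust
// Hypothetical fixmap layout; DragonOS's real FIXMAP_* constants are per-arch.
const PAGE_SIZE: usize = 4096;
const FIXMAP_SIZE: usize = 256 * 1024; // a 256 KiB fixmap window
const FIXMAP_START_VADDR: usize = 0xffff_f000_0000_0000;
const FIXMAP_END_VADDR: usize = FIXMAP_START_VADDR + FIXMAP_SIZE;

// Line 28: one slot per page of the fixmap window.
const SLOT_CNT: usize = FIXMAP_SIZE / PAGE_SIZE;

// Line 195: a slot index maps back to a fixed virtual address.
fn idx_to_virt(idx: usize) -> usize {
    FIXMAP_START_VADDR + idx * PAGE_SIZE
}

fn main() {
    assert_eq!(SLOT_CNT, 64);
    let v = idx_to_virt(3);
    // Line 153: unmap() only accepts addresses inside the fixmap window.
    assert!(v >= FIXMAP_START_VADDR && v < FIXMAP_END_VADDR);
    // Line 126: a mapping spanning several slots covers slot_count whole pages.
    let map_size = 5 * PAGE_SIZE;
    assert_eq!(map_size, 20480);
}
```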
ucontext.rs
     20  arch::{mm::PageMapper, CurrentIrqArch, MMArch},
    127  user_mapper: MMArch::setup_new_usermapper()?,  in new()
    132  brk_start: MMArch::USER_BRK_START,  in new()
    133  brk: MMArch::USER_BRK_START,  in new()
    175  let tmp_flags: PageFlags<MMArch> = PageFlags::new().set_write(true);  in try_clone()
    180  PageFrameCount::new(vma_guard.region.size() / MMArch::PAGE_SIZE),  in try_clone()
    192  MMArch::phys_2_virt(  in try_clone()
    203  MMArch::phys_2_virt(  in try_clone()
    217  new_frame.copy_from_nonoverlapping(current_frame, MMArch::PAGE_SIZE);  in try_clone()
    260  let addr = hint.data() & (!MMArch::PAGE_OFFSET_MASK);  in map_anonymous()
    [all …]
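In try_clone() (line 217) each page of a copied VMA is duplicated by allocating a new frame and copying MMArch::PAGE_SIZE bytes from the old one. A standalone sketch of that page copy, with heap buffers standing in for the two physical frames that the real code reaches through phys_2_virt():

```rust
const PAGE_SIZE: usize = 4096; // stand-in for MMArch::PAGE_SIZE

fn main() {
    // Stand-ins for the parent's frame and the freshly allocated child frame.
    let current_frame = vec![0xaau8; PAGE_SIZE];
    let mut new_frame = vec![0u8; PAGE_SIZE];

    // Same shape as `new_frame.copy_from_nonoverlapping(current_frame, MMArch::PAGE_SIZE)`.
    unsafe {
        core::ptr::copy_nonoverlapping(current_frame.as_ptr(), new_frame.as_mut_ptr(), PAGE_SIZE);
    }
    assert_eq!(current_frame, new_frame);
}
```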
kernel_mapper.rs
     11  mm::{allocator::page_frame::PageFrameCount, MMArch, MemoryManagementArch},
    107  flags: PageFlags<MMArch>,  in map_phys_with_size() argument
    114  let count = PageFrameCount::new(page_align_up(size) / MMArch::PAGE_SIZE);  in map_phys_with_size()
    124  vaddr += MMArch::PAGE_SIZE;  in map_phys_with_size()
    125  paddr += MMArch::PAGE_SIZE;  in map_phys_with_size()
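map_phys_with_size() turns a byte size into a whole number of pages (line 114) and then walks vaddr and paddr forward one page per iteration (lines 124-125); c_adapter.rs and the framebuffer code further down follow the same pattern. A standalone sketch of the loop, assuming a 4 KiB page and a mask-based page_align_up():

```rust
const PAGE_SIZE: usize = 4096; // stand-in for MMArch::PAGE_SIZE

// Assumed mask-based page_align_up(); the real helper lives in libs/align.rs.
fn page_align_up(x: usize) -> usize {
    (x + PAGE_SIZE - 1) & !(PAGE_SIZE - 1)
}

fn main() {
    let (mut vaddr, mut paddr, size) = (0xffff_a000_0000_0000usize, 0xfe00_0000usize, 0x2345usize);

    // Line 114: the byte size becomes a whole number of page mappings.
    let count = page_align_up(size) / PAGE_SIZE;
    assert_eq!(count, 3);

    for _ in 0..count {
        // Here the real code maps one page: mapper.map_phys(vaddr, paddr, flags).
        vaddr += PAGE_SIZE; // line 124
        paddr += PAGE_SIZE; // line 125
    }
    assert_eq!(vaddr, 0xffff_a000_0000_0000 + 3 * PAGE_SIZE);
    assert_eq!(paddr, 0xfe00_0000 + 3 * PAGE_SIZE);
}
```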
c_adapter.rs
     13  mm::MMArch,
     31  let count = PageFrameCount::new(page_align_up(size) / MMArch::PAGE_SIZE);  in rs_pseudo_map_phys()
     40  let count = PageFrameCount::new(page_align_up(size) / MMArch::PAGE_SIZE);  in rs_map_phys()
     43  let mut page_flags: PageFlags<MMArch> = PageFlags::new().set_execute(true).set_write(true);  in rs_map_phys()
     60  vaddr += MMArch::PAGE_SIZE;  in rs_map_phys()
     61  paddr += MMArch::PAGE_SIZE;  in rs_map_phys()
syscall.rs
      7  arch::MMArch,
    166  if new_addr < address_space.brk_start || new_addr >= MMArch::USER_END_VADDR {  in brk()
    286  if !old_vaddr.check_aligned(MMArch::PAGE_SIZE) {  in mremap()
    347  assert!(start_vaddr.check_aligned(MMArch::PAGE_SIZE));  in munmap()
    348  assert!(check_aligned(len, MMArch::PAGE_SIZE));  in munmap()
    359  let page_count = PageFrameCount::new(len / MMArch::PAGE_SIZE);  in munmap()
    381  assert!(start_vaddr.check_aligned(MMArch::PAGE_SIZE));  in mprotect()
    382  assert!(check_aligned(len, MMArch::PAGE_SIZE));  in mprotect()
    395  let page_count = PageFrameCount::new(len / MMArch::PAGE_SIZE);  in mprotect()
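munmap() and mprotect() insist on page-aligned arguments before converting the length into a frame count (lines 347-359 and 381-395). A sketch of those preconditions with a hypothetical 4 KiB page and an assumed alignment helper matching the `check_aligned(len, MMArch::PAGE_SIZE)` calls above:

```rust
const PAGE_SIZE: usize = 4096; // stand-in for MMArch::PAGE_SIZE

// Assumed helper for a power-of-two alignment.
fn check_aligned(value: usize, align: usize) -> bool {
    (value & (align - 1)) == 0
}

fn main() {
    let (start_vaddr, len) = (0x0000_7f00_0000_0000usize, 8 * PAGE_SIZE);

    // Lines 347-348 / 381-382: both the start address and the length must be page aligned.
    assert!(check_aligned(start_vaddr, PAGE_SIZE));
    assert!(check_aligned(len, PAGE_SIZE));

    // Lines 359 / 395: only then is the length turned into a frame count.
    let page_count = len / PAGE_SIZE;
    assert_eq!(page_count, 8);
}
```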
init.rs
      4  arch::MMArch, driver::serial::serial8250::send_to_default_serial8250_port,
     41  MMArch::init();  in mm_init()
page.rs
     10  arch::{interrupt::ipi::send_ipi, MMArch},
    143  let mask = (MMArch::PAGE_ENTRY_NUM << shift) - 1;  in index_of()
    147  return Some((addr.data() >> shift) & MMArch::PAGE_ENTRY_MASK);  in index_of()
    183  data: MMArch::make_entry(paddr, flags.data()),  in new()
    646  MMArch::write_bytes(MMArch::phys_2_virt(frame).unwrap(), 0, MMArch::PAGE_SIZE);  in map_phys()
    876  MMArch::invalidate_page(self.virt);  in drop()
    946  impl Flusher<MMArch> for InactiveFlusher {
    947  fn consume(&mut self, flush: PageFlush<MMArch>) {  in consume() argument
    963  addr & !(MMArch::PAGE_SIZE - 1)  in round_down_to_page_size()
    968  round_down_to_page_size(addr + MMArch::PAGE_SIZE - 1)  in round_up_to_page_size()
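Lines 143-147 compute a page-table index by shifting the address down and masking with MMArch::PAGE_ENTRY_MASK, and lines 963-968 are the page rounding helpers. A standalone sketch using hypothetical 4 KiB-page, 512-entries-per-table constants; the 9 bits per level below follow from those assumptions, whereas DragonOS derives the real shift from MMArch:

```rust
// Hypothetical 4 KiB-page, 512-entry table constants; DragonOS derives these from MMArch.
const PAGE_SHIFT: usize = 12;
const PAGE_SIZE: usize = 1 << PAGE_SHIFT;
const PAGE_ENTRY_NUM: usize = 512;
const PAGE_ENTRY_MASK: usize = PAGE_ENTRY_NUM - 1;

// Sketch of index_of() (lines 143-147): shift the address down to the level,
// then mask off everything above the per-table index.
fn index_of(addr: usize, level: usize) -> usize {
    let shift = PAGE_SHIFT + level * 9; // 9 index bits per level for 512 entries (assumption)
    (addr >> shift) & PAGE_ENTRY_MASK
}

// Lines 963-968: page rounding helpers.
fn round_down_to_page_size(addr: usize) -> usize {
    addr & !(PAGE_SIZE - 1)
}
fn round_up_to_page_size(addr: usize) -> usize {
    round_down_to_page_size(addr + PAGE_SIZE - 1)
}

fn main() {
    assert_eq!(index_of(0x1234_5678, 0), 0x145); // bits 12..21 of the address
    assert_eq!(round_down_to_page_size(0x1234), 0x1000);
    assert_eq!(round_up_to_page_size(0x1001), 0x2000);
}
```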
mmio_buddy.rs
      7  mm::{MMArch, MemoryManagementArch},
    525  assert!(vaddr.check_aligned(MMArch::PAGE_SIZE));  in release_mmio()
    526  assert!(length & (MMArch::PAGE_SIZE - 1) == 0);  in release_mmio()
    535  let page_count = length / MMArch::PAGE_SIZE;  in release_mmio()
    548  PageFlags<MMArch>,  in release_mmio()
    549  crate::mm::page::PageFlush<MMArch>,  in release_mmio()
    553  .unmap_phys(vaddr + i * MMArch::PAGE_SIZE, false);  in release_mmio()
    633  size & (MMArch::PAGE_SIZE - 1) == 0,  in from_raw()
/DragonOS-0.1.9/kernel/src/driver/net/

dma.rs
      3  use crate::arch::MMArch;
     20  ((pages * PAGE_SIZE + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE).next_power_of_two(),  in dma_alloc()
     24  let virt = MMArch::phys_2_virt(paddr).unwrap();  in dma_alloc()
     26  core::ptr::write_bytes(virt.data() as *mut u8, 0, count.data() * MMArch::PAGE_SIZE);  in dma_alloc()
     28  let dma_flags: PageFlags<MMArch> = PageFlags::mmio_flags();  in dma_alloc()
     38  NonNull::new(MMArch::phys_2_virt(paddr).unwrap().data() as _).unwrap(),  in dma_alloc()
     47  ((pages * PAGE_SIZE + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE).next_power_of_two(),  in dma_dealloc()
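Both dma_alloc() and dma_dealloc() here (and the identical expressions in virtio_impl.rs below) convert the requested number of DMA pages into a power-of-two frame count, presumably because the underlying buddy allocator hands out power-of-two blocks. A sketch of that expression, assuming both page sizes are 4 KiB (they need not be equal in general):

```rust
// Stand-ins: the DMA API's page size and the MMU's page size (4 KiB for both here;
// the real values come from the DMA/virtio layer and MMArch respectively).
const DMA_PAGE_SIZE: usize = 4096; // `PAGE_SIZE` in dma.rs / virtio_impl.rs
const MM_PAGE_SIZE: usize = 4096;  // `MMArch::PAGE_SIZE`

// Lines 20 and 47: round the request up to whole MMU pages, then up to a power of two.
fn frames_for(pages: usize) -> usize {
    ((pages * DMA_PAGE_SIZE + MM_PAGE_SIZE - 1) / MM_PAGE_SIZE).next_power_of_two()
}

fn main() {
    assert_eq!(frames_for(1), 1);
    assert_eq!(frames_for(3), 4); // 3 pages -> 4 frames: the allocation is power-of-two sized
    assert_eq!(frames_for(5), 8);
}
```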
/DragonOS-0.1.9/kernel/src/driver/virtio/

virtio_impl.rs
      3  use crate::arch::MMArch;
     26  ((pages * PAGE_SIZE + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE).next_power_of_two(),  in dma_alloc()
     31  let virt = MMArch::phys_2_virt(paddr).unwrap();  in dma_alloc()
     33  core::ptr::write_bytes(virt.data() as *mut u8, 0, count.data() * MMArch::PAGE_SIZE);  in dma_alloc()
     35  let dma_flags: PageFlags<MMArch> = PageFlags::mmio_flags();  in dma_alloc()
     45  NonNull::new(MMArch::phys_2_virt(paddr).unwrap().data() as _).unwrap(),  in dma_alloc()
     58  ((pages * PAGE_SIZE + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE).next_power_of_two(),  in dma_dealloc()
     91  return MMArch::virt_2_phys(vaddr).unwrap().data();  in share()
/DragonOS-0.1.9/kernel/src/arch/x86_64/mm/

mod.rs
     22  arch::MMArch,
     52  static INNER_ALLOCATOR: SpinLock<Option<BuddyAllocator<MMArch>>> = SpinLock::new(None);
    157  MMArch::phys_2_virt(PhysAddr::new(0)).unwrap().data()  in init()
    410  unsafe { MMArch::virt_2_phys(VirtAddr::new(page_align_up(virt_offset))) }.unwrap();  in allocator_init()
    421  let _old_page_table = MMArch::table(PageTableKind::Kernel);  in allocator_init()
    427  let mut mapper: crate::mm::page::PageMapper<MMArch, &mut BumpAllocator<MMArch>> =  in allocator_init()
    428  crate::mm::page::PageMapper::<MMArch, _>::create(  in allocator_init()
    439  let empty_entry = PageEntry::<MMArch>::from_usize(0);  in allocator_init()
    440  for i in 0..MMArch::PAGE_ENTRY_NUM {  in allocator_init()
    452  for i in 0..((area.size + MMArch::PAGE_SIZE - 1) / MMArch::PAGE_SIZE) {  in allocator_init()
    [all …]
/DragonOS-0.1.9/kernel/src/arch/riscv64/

elf.rs
      1  use crate::{arch::MMArch, libs::elf::ElfArch, mm::MemoryManagementArch};
      7  const ELF_ET_DYN_BASE: usize = MMArch::USER_END_VADDR.data() / 3 * 2;
      9  const ELF_PAGE_SIZE: usize = MMArch::PAGE_SIZE;
/DragonOS-0.1.9/kernel/src/arch/x86_64/

elf.rs
      1  use crate::{arch::MMArch, libs::elf::ElfArch, mm::MemoryManagementArch};
      7  const ELF_ET_DYN_BASE: usize = MMArch::USER_END_VADDR.data() / 3 * 2;
      9  const ELF_PAGE_SIZE: usize = MMArch::PAGE_SIZE;
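Both elf.rs files place ELF_ET_DYN_BASE at two thirds of the user address space, the same two-thirds split Linux has traditionally used for loading PIE executables; dividing before multiplying avoids overflow. A quick check of the arithmetic with a hypothetical USER_END_VADDR (not DragonOS's actual constant):

```rust
// Hypothetical user-space ceiling (a 47-bit address space); not DragonOS's real USER_END_VADDR.
const USER_END_VADDR: usize = 0x0000_8000_0000_0000;

fn main() {
    // Divide first, then multiply, as in `MMArch::USER_END_VADDR.data() / 3 * 2`.
    let elf_et_dyn_base = USER_END_VADDR / 3 * 2;
    assert_eq!(elf_et_dyn_base, 0x5555_5555_5554);
}
```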
/DragonOS-0.1.9/kernel/src/mm/allocator/

kernel_allocator.rs
      7  mm::{MMArch, MemoryManagementArch, VirtAddr},
     30  let count = (page_align_up(layout.size()) / MMArch::PAGE_SIZE).next_power_of_two();  in alloc_in_buddy()
     36  let virt_addr = unsafe { MMArch::phys_2_virt(phy_addr).ok_or(AllocError)? };  in alloc_in_buddy()
     44  allocated_frame_count.data() * MMArch::PAGE_SIZE,  in alloc_in_buddy()
     52  let count = (page_align_up(layout.size()) / MMArch::PAGE_SIZE).next_power_of_two();  in free_in_buddy()
     54  let phy_addr = MMArch::virt_2_phys(VirtAddr::new(ptr as usize)).unwrap();  in free_in_buddy()
page_frame.rs
      7  arch::{mm::LockedFrameAllocator, MMArch},
     22  number: paddr.data() / MMArch::PAGE_SIZE,  in new()
     38  return PhysAddr::new(self.number * MMArch::PAGE_SIZE);  in phys_address()
     97  number: vaddr.data() / MMArch::PAGE_SIZE,  in new()
    109  return VirtAddr::new(self.number * MMArch::PAGE_SIZE);  in virt_address()
    185  return self.0 * MMArch::PAGE_SIZE;  in bytes()
    192  if bytes & MMArch::PAGE_OFFSET_MASK != 0 {  in from_bytes()
    195  return Some(Self(bytes / MMArch::PAGE_SIZE));  in from_bytes()
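page_frame.rs converts between addresses and frame numbers by dividing or multiplying by MMArch::PAGE_SIZE, and from_bytes() rejects byte counts that are not whole pages (lines 192-195). A standalone sketch of those conversions with a 4 KiB stand-in page size:

```rust
const PAGE_SIZE: usize = 4096;                 // stand-in for MMArch::PAGE_SIZE
const PAGE_OFFSET_MASK: usize = PAGE_SIZE - 1; // stand-in for MMArch::PAGE_OFFSET_MASK

// Lines 22 and 38: a physical frame number is the address divided by the page size,
// and the frame's base address is the number multiplied back.
fn frame_number(paddr: usize) -> usize {
    paddr / PAGE_SIZE
}
fn frame_base(number: usize) -> usize {
    number * PAGE_SIZE
}

// Lines 192-195: a byte count only converts to a frame count if it is a whole number of pages.
fn count_from_bytes(bytes: usize) -> Option<usize> {
    if bytes & PAGE_OFFSET_MASK != 0 {
        return None;
    }
    Some(bytes / PAGE_SIZE)
}

fn main() {
    let n = frame_number(0x8020_1234);
    assert_eq!(frame_base(n), 0x8020_1000); // base of the containing frame
    assert_eq!(count_from_bytes(0x3000), Some(3));
    assert_eq!(count_from_bytes(0x3001), None); // not page aligned
}
```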
/DragonOS-0.1.9/kernel/src/arch/x86_64/init/

mod.rs
     20  MMArch,
     48  MMArch::phys_2_virt(PhysAddr::new(&GDT_Table as *const usize as usize)).unwrap();  in kernel_main()
     50  MMArch::phys_2_virt(PhysAddr::new(&IDT_Table as *const usize as usize)).unwrap();  in kernel_main()
     75  MMArch::phys_2_virt(PhysAddr::new(&GDT_Table as *const usize as usize)).unwrap();  in early_setup_arch()
     77  MMArch::phys_2_virt(PhysAddr::new(&IDT_Table as *const usize as usize)).unwrap();  in early_setup_arch()
/DragonOS-0.1.9/kernel/src/exception/

ipi.rs
      5  arch::{sched::sched, MMArch},
     66  unsafe { MMArch::invalidate_all() };  in handle()
/DragonOS-0.1.9/kernel/src/driver/video/

mod.rs
      4  arch::MMArch,
     99  page_align_up(frame_buffer_info_guard.buf_size()) / MMArch::PAGE_SIZE,  in init_frame_buffer()
    101  let page_flags: PageFlags<MMArch> = PageFlags::new().set_execute(true).set_write(true);  in init_frame_buffer()
    116  vaddr += MMArch::PAGE_SIZE;  in init_frame_buffer()
    117  paddr += MMArch::PAGE_SIZE;  in init_frame_buffer()
/DragonOS-0.1.9/kernel/src/libs/

align.rs
      8  use crate::{arch::MMArch, mm::MemoryManagementArch, KERNEL_ALLOCATOR};
    130  let page_size = MMArch::PAGE_SIZE;  in page_align_up()
    135  let page_size = MMArch::PAGE_SIZE;  in page_align_down()
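page_align_up() and page_align_down() both read MMArch::PAGE_SIZE; their bodies are not shown in this index, but the usual mask-based implementation for a power-of-two page size looks like the sketch below (an assumption, not the verbatim DragonOS code):

```rust
const PAGE_SIZE: usize = 4096; // stand-in for MMArch::PAGE_SIZE

// Assumed mask-based bodies; they rely on PAGE_SIZE being a power of two.
fn page_align_up(addr: usize) -> usize {
    (addr + PAGE_SIZE - 1) & !(PAGE_SIZE - 1)
}
fn page_align_down(addr: usize) -> usize {
    addr & !(PAGE_SIZE - 1)
}

fn main() {
    assert_eq!(page_align_up(0x1001), 0x2000);
    assert_eq!(page_align_up(0x2000), 0x2000); // already aligned values are unchanged
    assert_eq!(page_align_down(0x1fff), 0x1000);
}
```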
/DragonOS-0.1.9/kernel/src/driver/open_firmware/

fdt.rs
    186  arch::MMArch,  in early_init_dt_add_memory()
    197  if size < (MMArch::PAGE_SIZE - (base & (!MMArch::PAGE_MASK))) {  in early_init_dt_add_memory()
    201  if PhysAddr::new(base).check_aligned(MMArch::PAGE_SIZE) == false {  in early_init_dt_add_memory()
    202  size -= MMArch::PAGE_SIZE - (base & (!MMArch::PAGE_MASK));  in early_init_dt_add_memory()
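early_init_dt_add_memory() trims a memory node whose base is not page aligned: line 197 skips regions too small to reach the next page boundary, and line 202 subtracts the unaligned head from the size. A sketch of that adjustment; the final rounding of `base` up to the next page is an assumption here, since that line is not shown above:

```rust
const PAGE_SIZE: usize = 4096;
const PAGE_MASK: usize = !(PAGE_SIZE - 1); // assumes MMArch::PAGE_MASK masks the page-number bits

fn main() {
    // A device-tree memory node whose base is not page aligned.
    let (mut base, mut size) = (0x8000_0123usize, 0x20_0000usize);

    // Line 197: bytes from `base` up to the next page boundary.
    let head = PAGE_SIZE - (base & !PAGE_MASK);
    if size < head {
        return; // region too small to contain an aligned page; it gets skipped
    }
    if (base & !PAGE_MASK) != 0 {
        size -= head;                              // line 202: drop the unaligned head
        base = (base + PAGE_SIZE - 1) & PAGE_MASK; // assumed follow-up: bump base to the next page
    }
    assert_eq!(base, 0x8000_1000);
    assert_eq!(size, 0x20_0000 - (0x1000 - 0x123));
}
```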
/DragonOS-0.1.9/kernel/src/arch/x86_64/kvm/vmx/

ept.rs
      3  use crate::arch::MMArch;
     95  flags: PageFlags<MMArch>,  in walk() argument
/DragonOS-0.1.9/kernel/src/driver/firmware/efi/

init.rs
      7  arch::MMArch,
    205  && ((efi_vaddr - md.virt_start) < (md.page_count << (MMArch::PAGE_SHIFT as u64)))  in efi_vaddr_2_paddr()
    225  (md.phys_start + (md.page_count << (MMArch::PAGE_SHIFT as u64))) as usize,  in reserve_memory_regions()
    231  let size = (page_count << (MMArch::PAGE_SHIFT as u64)) as usize;  in reserve_memory_regions()
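The EFI code converts a memory descriptor's page_count into bytes with a shift by MMArch::PAGE_SHIFT (lines 225 and 231) and uses the same product as a bounds check in efi_vaddr_2_paddr() (line 205). A sketch with a hypothetical 12-bit page shift:

```rust
const PAGE_SHIFT: u64 = 12; // stand-in for MMArch::PAGE_SHIFT (4 KiB pages)

fn main() {
    // An EFI memory descriptor: `page_count` pages starting at `phys_start`.
    let (phys_start, page_count) = (0x9000_0000u64, 16u64);

    let size = page_count << PAGE_SHIFT; // line 231: pages -> bytes
    let end = phys_start + size;         // line 225: exclusive end of the region
    assert_eq!(size, 16 * 4096);
    assert_eq!(end, 0x9001_0000);

    // Line 205: a virtual address belongs to this descriptor if its offset
    // from virt_start is below the region's byte size.
    let (virt_start, efi_vaddr) = (0xffff_9000_0000_0000u64, 0xffff_9000_0000_2000u64);
    assert!((efi_vaddr - virt_start) < size);
}
```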
/DragonOS-0.1.9/kernel/src/virt/kvm/

vm.rs
      2  use crate::arch::MMArch;
     63  if (mem.memory_size & (MMArch::PAGE_SIZE - 1) as u64) != 0  in set_user_memory_region()
     64  || (mem.guest_phys_addr & (MMArch::PAGE_SIZE - 1) as u64) != 0  in set_user_memory_region()
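set_user_memory_region() rejects a memory slot whose size or guest-physical base is not a multiple of the page size (lines 63-64). A sketch of that check with a 4 KiB stand-in page size:

```rust
const PAGE_SIZE: u64 = 4096; // stand-in for `MMArch::PAGE_SIZE as u64`

// Sketch of the validity check in set_user_memory_region(): both the slot's
// size and its guest-physical base must be page aligned.
fn region_is_aligned(memory_size: u64, guest_phys_addr: u64) -> bool {
    (memory_size & (PAGE_SIZE - 1)) == 0 && (guest_phys_addr & (PAGE_SIZE - 1)) == 0
}

fn main() {
    assert!(region_is_aligned(2 * 1024 * 1024, 0x10_0000));
    assert!(!region_is_aligned(4096 + 1, 0)); // size not a multiple of the page size
    assert!(!region_is_aligned(4096, 0x123)); // base not page aligned
}
```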