1mod map_size;
31
32pub use crate::address_space::map_size::MapSize;
33use crate::address_space::map_size::MapSizeEnsureChainError;
34use crate::addresses::{
35    PageSize, PhysicalAddress, PhysicalPage, Size1G, Size2M, Size4K, VirtualAddress,
36};
37use crate::bits::VirtualMemoryPageBits;
38use crate::page_table::pd::{L2Index, PageDirectory, PdEntry, PdEntryKind};
39use crate::page_table::pdpt::{L3Index, PageDirectoryPointerTable, PdptEntry, PdptEntryKind};
40use crate::page_table::pml4::{L4Index, PageMapLevel4, Pml4Entry};
41use crate::page_table::pt::{L1Index, PageTable, PtEntry4k};
42use crate::{FrameAlloc, PhysMapper, PhysMapperExt, read_cr3_phys};
43
44pub struct AddressSpace<'m, M: PhysMapper> {
46    root: PhysicalPage<Size4K>, mapper: &'m M,
48}
49
/// Physical page that holds the root (PML4) table of an address space.
pub type RootPage = PhysicalPage<Size4K>;
52
impl<'m, M: PhysMapper> AddressSpace<'m, M> {
    /// Wraps the address space the CPU is currently running on, by reading
    /// the root table's physical address out of CR3.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `mapper` can produce valid references
    /// for the live page-table pages reachable from CR3, and that those
    /// tables are not concurrently mutated through some other path while
    /// this handle is in use.
    #[inline]
    pub unsafe fn from_current(mapper: &'m M) -> Self {
        let root_pa = unsafe { read_cr3_phys() };
        let root = PhysicalPage::<Size4K>::from_addr(root_pa);
        Self { root, mapper }
    }

    /// Wraps an existing page-table tree rooted at `root`.
    #[inline]
    pub const fn from_root(mapper: &'m M, root: PhysicalPage<Size4K>) -> Self {
        Self { root, mapper }
    }

    /// Loads this address space's root into CR3, making it the active
    /// translation for the current CPU.
    ///
    /// # Safety
    ///
    /// `self.root` must point to a well-formed PML4 whose mappings keep the
    /// currently executing code, stack, and data reachable at their current
    /// virtual addresses; otherwise the CPU faults immediately after the
    /// write.
    #[inline]
    pub unsafe fn activate(&self) {
        let cr3 = self.root.base().as_u64();
        unsafe {
            core::arch::asm!("mov cr3, {}", in(reg) cr3, options(nostack, preserves_flags));
        }
    }

    /// Physical page of the root (PML4) table.
    #[inline]
    #[must_use]
    pub const fn root_page(&self) -> RootPage {
        self.root
    }

    /// Mutable view of the L1 page table stored in `page`, via the mapper.
    ///
    /// NOTE(review): takes `&self` yet yields `&mut` — soundness rests on
    /// the `PhysMapper` implementation's aliasing guarantees; confirm.
    #[inline]
    pub(crate) fn pt_mut(&self, page: PhysicalPage<Size4K>) -> &mut PageTable {
        self.mapper.pt_mut(page)
    }

    /// Translates `va` to its physical address, or `None` when no mapping
    /// exists. Handles 1 GiB, 2 MiB, and 4 KiB leaves by joining the leaf's
    /// page base with the size-appropriate offset of `va`.
    #[must_use]
    pub fn query(&self, va: VirtualAddress) -> Option<PhysicalAddress> {
        match self.walk(va) {
            WalkResult::Leaf1G { base, .. } => {
                let off = va.offset::<Size1G>();
                Some(base.join(off))
            }
            WalkResult::Leaf2M { base, .. } => {
                let off = va.offset::<Size2M>();
                Some(base.join(off))
            }
            WalkResult::L1 { pte, .. } => {
                // A chain that reaches L1 can still hold a non-present PTE;
                // `page_4k()` yields None in that case and the `?` bails out.
                let (base4k, _fl) = pte.page_4k()?;
                let off = va.offset::<Size4K>();
                Some(base4k.join(off))
            }
            WalkResult::Missing => None,
        }
    }

    /// Maps a single page of size `S` at `va` -> `pa`.
    ///
    /// Intermediate tables are created on demand from `alloc` and stamped
    /// with `nonleaf_flags`; the leaf entry itself is written with
    /// `leaf_flags` via `S::set_leaf`.
    ///
    /// # Errors
    ///
    /// Returns [`AddressSpaceMapOneError::OutOfMemory`] when building the
    /// intermediate table chain fails.
    pub fn map_one<A: FrameAlloc, S: MapSize>(
        &self,
        alloc: &mut A,
        va: VirtualAddress,
        pa: PhysicalAddress,
        nonleaf_flags: VirtualMemoryPageBits,
        leaf_flags: VirtualMemoryPageBits,
    ) -> Result<(), AddressSpaceMapOneError> {
        // Only the physical side is alignment-checked, and only in debug
        // builds. NOTE(review): `va` alignment is not asserted here —
        // confirm `S::set_leaf` tolerates or masks an unaligned virtual
        // address.
        debug_assert_eq!(pa.offset::<S>().as_u64(), 0, "physical address not aligned");

        let leaf_tbl = S::ensure_chain_for(self, alloc, va, nonleaf_flags)?;
        S::set_leaf(self, leaf_tbl, va, pa, leaf_flags);
        Ok(())
    }

    /// Removes the 4 KiB mapping at `va` by zeroing its PTE.
    ///
    /// Emptied intermediate tables are left in place (see
    /// [`Self::collapse_empty_tables`]) and no TLB invalidation is
    /// performed here — presumably the caller handles that; confirm.
    ///
    /// # Errors
    ///
    /// Fails when `va` is covered by a 2 MiB or 1 GiB leaf instead of a
    /// 4 KiB page, or when the chain or the PTE itself is absent.
    pub fn unmap_one(&self, va: VirtualAddress) -> Result<(), &'static str> {
        match self.walk(va) {
            WalkResult::L1 { pt, i1, pte } => {
                if !pte.present() {
                    return Err("missing: pte");
                }
                pt.set_zero(i1);
                Ok(())
            }
            WalkResult::Leaf2M { .. } => Err("found 2MiB leaf (not a 4KiB mapping)"),
            WalkResult::Leaf1G { .. } => Err("found 1GiB leaf (not a 4KiB mapping)"),
            WalkResult::Missing => Err("missing: chain"),
        }
    }

    /// Maps `[virt_start, virt_start + len)` onto `[phys_start, ...)`,
    /// greedily choosing the largest page size (1 GiB, then 2 MiB, then
    /// 4 KiB) for which both the virtual and physical cursor are aligned
    /// and enough length remains.
    ///
    /// NOTE(review): 1 GiB leaves require hardware gigabyte-page support —
    /// confirm callers only use this on CPUs that have it.
    ///
    /// # Errors
    ///
    /// [`AddressSpaceMapRegionError::Unaligned`] when the cursor reaches a
    /// position where even 4 KiB alignment fails (e.g. `virt_start` or
    /// `phys_start` was not page-aligned), or `OutOfMemory` when a table
    /// allocation fails part-way through (already-mapped pages stay mapped).
    pub fn map_region<A: FrameAlloc>(
        &self,
        alloc: &mut A,
        virt_start: VirtualAddress,
        phys_start: PhysicalAddress,
        len: u64,
        nonleaf_flags: VirtualMemoryPageBits,
        leaf_flags: VirtualMemoryPageBits,
    ) -> Result<(), AddressSpaceMapRegionError> {
        let mut off = 0u64;
        while off < len {
            let va = VirtualAddress::new(virt_start.as_u64() + off);
            let pa = PhysicalAddress::new(phys_start.as_u64() + off);
            let remain = len - off;

            // Prefer a 1 GiB leaf when both addresses sit on a 1 GiB
            // boundary and at least 1 GiB is left to map.
            if (va.as_u64() & (Size1G::SIZE - 1) == 0)
                && (pa.as_u64() & (Size1G::SIZE - 1) == 0)
                && remain >= Size1G::SIZE
            {
                self.map_one::<A, Size1G>(alloc, va, pa, nonleaf_flags, leaf_flags)?;
                off += Size1G::SIZE;
                continue;
            }

            // Otherwise try a 2 MiB leaf under the same alignment/length rule.
            if (va.as_u64() & (Size2M::SIZE - 1) == 0)
                && (pa.as_u64() & (Size2M::SIZE - 1) == 0)
                && remain >= Size2M::SIZE
            {
                self.map_one::<A, Size2M>(alloc, va, pa, nonleaf_flags, leaf_flags)?;
                off += Size2M::SIZE;
                continue;
            }

            // Fall back to a 4 KiB page; a trailing sub-4K remainder is
            // still mapped as a whole page (off may overshoot len).
            if (va.as_u64() & (Size4K::SIZE - 1) == 0) && (pa.as_u64() & (Size4K::SIZE - 1) == 0) {
                self.map_one::<A, Size4K>(alloc, va, pa, nonleaf_flags, leaf_flags)?;
                off += Size4K::SIZE;
                continue;
            }

            return Err(AddressSpaceMapRegionError::Unaligned(va, pa));
        }
        Ok(())
    }

    /// Unmaps `[virt_start, virt_start + len)`.
    ///
    /// A 1 GiB or 2 MiB leaf is removed only when the range covers the
    /// whole leaf at its natural alignment; a huge leaf merely grazed by
    /// the range is NOT split — the `_` arm just advances the cursor 4 KiB
    /// at a time, leaving that leaf mapped. Missing entries are likewise
    /// skipped 4 KiB at a time. No TLB invalidation is performed here.
    pub fn unmap_region(&self, virt_start: VirtualAddress, len: u64) {
        let mut off = 0u64;
        while off < len {
            let va = VirtualAddress::new(virt_start.as_u64() + off);
            match self.walk(va) {
                WalkResult::Leaf1G { pdpt, i3, .. }
                    if (va.as_u64() & (Size1G::SIZE - 1) == 0) && (len - off) >= Size1G::SIZE =>
                {
                    pdpt.set_zero(i3);
                    off += Size1G::SIZE;
                }
                WalkResult::Leaf2M { pd, i2, .. }
                    if (va.as_u64() & (Size2M::SIZE - 1) == 0) && (len - off) >= Size2M::SIZE =>
                {
                    pd.set_zero(i2);
                    off += Size2M::SIZE;
                }
                WalkResult::L1 { pt, i1, pte } => {
                    if pte.present() {
                        pt.set_zero(i1);
                    }
                    off += Size4K::SIZE;
                }
                _ => off += Size4K::SIZE,
            }
        }
    }

    /// Walks the whole tree and releases table pages that hold no present
    /// entries: an all-empty PT has its PD entry zeroed and its page
    /// returned to `free`; the same then applies to empty PDs and finally
    /// to empty PDPTs. Huge-page leaves (1 GiB / 2 MiB) keep their
    /// containing table alive.
    #[allow(clippy::similar_names)]
    pub fn collapse_empty_tables<F: FrameAlloc>(&self, free: &mut F) {
        let pml4 = self.pml4_mut();

        for i4 in 0..512 {
            let e4 = pml4.get(L4Index::new(i4));
            let Some(pdpt_page) = e4.next_table() else {
                continue;
            };
            let pdpt = self.pdpt_mut(pdpt_page);

            // Tracks whether any PDPT entry under this PML4 slot survives.
            let mut used_l3 = false;
            for i3 in 0..512 {
                match pdpt.get(L3Index::new(i3)).kind() {
                    Some(PdptEntryKind::Leaf1GiB(_, _)) => {
                        used_l3 = true;
                    }
                    Some(PdptEntryKind::NextPageDirectory(pd_page, _)) => {
                        let pd = self.pd_mut(pd_page);
                        let mut used_l2 = false;
                        for i2 in 0..512 {
                            match pd.get(L2Index::new(i2)).kind() {
                                Some(PdEntryKind::Leaf2MiB(_, _)) => {
                                    used_l2 = true;
                                }
                                Some(PdEntryKind::NextPageTable(pt_page, _)) => {
                                    // Scan the PT for any present PTE;
                                    // short-circuit on the first hit.
                                    let pt = self.pt_mut(pt_page);
                                    let mut any_present = false;
                                    for i1 in 0..512 {
                                        if pt.get(L1Index::new(i1)).present() {
                                            any_present = true;
                                            break;
                                        }
                                    }
                                    if any_present {
                                        used_l2 = true;
                                    } else {
                                        // Detach the empty PT before
                                        // returning its page to the allocator.
                                        pd.set(L2Index::new(i2), PdEntry::zero());
                                        free.free_4k(pt_page);
                                    }
                                }
                                None => {}
                            }
                        }
                        if used_l2 {
                            used_l3 = true;
                        } else {
                            pdpt.set(L3Index::new(i3), PdptEntry::zero());
                            free.free_4k(pd_page);
                        }
                    }
                    None => {}
                }
            }

            if !used_l3 {
                pml4.set(L4Index::new(i4), Pml4Entry::zero());
                free.free_4k(pdpt_page);
            }
        }
    }

    /// Descends the tree for `va`, stopping at the first huge-page leaf or
    /// at the L1 entry; returns [`WalkResult::Missing`] as soon as any
    /// level's entry is absent.
    #[allow(clippy::similar_names)]
    fn walk(&self, va: VirtualAddress) -> WalkResult<'_> {
        let (i4, i3, i2, i1) = crate::page_table::split_indices(va);

        let pml4 = self.pml4_mut();
        let Some(pdpt_page) = pml4.get(i4).next_table() else {
            return WalkResult::Missing;
        };

        let pdpt = self.pdpt_mut(pdpt_page);
        match pdpt.get(i3).kind() {
            Some(PdptEntryKind::Leaf1GiB(base, _fl)) => WalkResult::Leaf1G { base, pdpt, i3 },
            Some(PdptEntryKind::NextPageDirectory(pd_page, _fl)) => {
                let pd = self.pd_mut(pd_page);
                match pd.get(i2).kind() {
                    Some(PdEntryKind::Leaf2MiB(base, _fl)) => WalkResult::Leaf2M { base, pd, i2 },
                    Some(PdEntryKind::NextPageTable(pt_page, _fl)) => {
                        let pt = self.pt_mut(pt_page);
                        let pte = pt.get(i1);
                        WalkResult::L1 { pt, i1, pte }
                    }
                    None => WalkResult::Missing,
                }
            }
            None => WalkResult::Missing,
        }
    }

    /// Mutable view of the root PML4, via the mapper.
    ///
    /// NOTE(review): `&self` -> `&mut` — see [`Self::pt_mut`].
    #[inline]
    pub(crate) fn pml4_mut(&self) -> &mut PageMapLevel4 {
        self.mapper.pml4_mut(self.root)
    }

    /// Mutable view of the PDPT stored in `page`, via the mapper.
    #[inline]
    pub(crate) fn pdpt_mut(&self, page: PhysicalPage<Size4K>) -> &mut PageDirectoryPointerTable {
        self.mapper.pdpt_mut(page)
    }

    /// Mutable view of the page directory stored in `page`, via the mapper.
    #[inline]
    pub(crate) fn pd_mut(&self, page: PhysicalPage<Size4K>) -> &mut PageDirectory {
        self.mapper.pd_mut(page)
    }

    /// Zeroes the PDPT page `page` through the mapper.
    #[inline]
    pub(crate) fn zero_pdpt(&self, page: PhysicalPage<Size4K>) {
        self.mapper.zero_pdpt(page);
    }

    /// Zeroes the page-directory page `page` through the mapper.
    #[inline]
    pub(crate) fn zero_pd(&self, page: PhysicalPage<Size4K>) {
        self.mapper.zero_pd(page);
    }

    /// Zeroes the page-table page `page` through the mapper.
    #[inline]
    pub(crate) fn zero_pt(&self, page: PhysicalPage<Size4K>) {
        self.mapper.zero_pt(page);
    }
}
407
/// Error returned by [`AddressSpace::map_one`].
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
pub enum AddressSpaceMapOneError {
    /// Building the intermediate table chain for the mapping failed.
    #[error(transparent)]
    OutOfMemory(#[from] MapSizeEnsureChainError),
}
414
/// Error returned by [`AddressSpace::map_region`].
#[derive(Debug, Copy, Clone, PartialEq, Eq, thiserror::Error)]
pub enum AddressSpaceMapRegionError {
    /// Building an intermediate table chain failed part-way through.
    #[error(transparent)]
    OutOfMemory(#[from] MapSizeEnsureChainError),
    /// The mapping cursor reached a va/pa pair that is not even 4 KiB
    /// aligned, so no page size can cover it.
    #[error("unaligned va/pa for remaining size: {0:?} -> {1:?}")]
    Unaligned(VirtualAddress, PhysicalAddress),
}
423
424impl From<AddressSpaceMapOneError> for AddressSpaceMapRegionError {
425    fn from(e: AddressSpaceMapOneError) -> Self {
426        match e {
427            AddressSpaceMapOneError::OutOfMemory(e) => Self::OutOfMemory(e),
428        }
429    }
430}
431
/// Names the table level a chain-building step must reach so that a leaf
/// of the corresponding size can be installed.
///
/// NOTE(review): no consumer is visible in this file — presumably used by
/// the `map_size` module's `ensure_chain_for` machinery; confirm.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum EnsureTarget {
    /// Stop at the PDPT (L3); a 1 GiB leaf lives in a PDPT entry.
    L3For1G,
    /// Stop at the page directory (L2); a 2 MiB leaf lives in a PD entry.
    L2For2M,
    /// Stop at the page table (L1); a 4 KiB leaf lives in a PT entry.
    L1For4K,
}
442
/// Outcome of [`AddressSpace::walk`]: where the descent for a virtual
/// address stopped, with mutable access to the table that owns the final
/// entry so callers (`unmap_one`, `unmap_region`) can modify it in place.
#[allow(dead_code)]
enum WalkResult<'a> {
    /// Stopped at a 1 GiB leaf in the PDPT.
    Leaf1G {
        // Physical base of the 1 GiB page.
        base: PhysicalPage<Size1G>,
        // Table holding the leaf entry, for in-place edits.
        pdpt: &'a mut PageDirectoryPointerTable,
        // Index of the leaf entry within `pdpt`.
        i3: L3Index,
    },
    /// Stopped at a 2 MiB leaf in the page directory.
    Leaf2M {
        // Physical base of the 2 MiB page.
        base: PhysicalPage<Size2M>,
        // Table holding the leaf entry, for in-place edits.
        pd: &'a mut PageDirectory,
        // Index of the leaf entry within `pd`.
        i2: L2Index,
    },
    /// Reached the L1 page table; `pte` is a copy of the entry and may be
    /// non-present (callers check `pte.present()`).
    L1 {
        pt: &'a mut PageTable,
        i1: L1Index,
        pte: PtEntry4k,
    },
    /// Some level's entry was absent; no mapping exists for the address.
    Missing,
}