about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  arch/x86_64/boot/boot.asm            4
-rw-r--r--  kernel/memory/paging/entry.sats      2
-rw-r--r--  kernel/memory/paging/table.dats     68
-rw-r--r--  kernel/memory/paging/table.sats     19
-rw-r--r--  kernel/prelude/SATS/safe_casts.sats 10
5 files changed, 100 insertions(+), 3 deletions(-)
diff --git a/arch/x86_64/boot/boot.asm b/arch/x86_64/boot/boot.asm
index 3f85423..c229c19 100644
--- a/arch/x86_64/boot/boot.asm
+++ b/arch/x86_64/boot/boot.asm
@@ -94,6 +94,10 @@ error:
hlt
set_up_page_tables:
+ ; map P4 table recursively
+ mov eax, p4_table
+ or eax, 0b11 ; present + writable
+ mov [p4_table + 511 * 8], eax
; map first P4 entry to P3 table
mov eax, p3_table
or eax, 0b11 ; present + writable
diff --git a/kernel/memory/paging/entry.sats b/kernel/memory/paging/entry.sats
index a7f91ee..2d12dbf 100644
--- a/kernel/memory/paging/entry.sats
+++ b/kernel/memory/paging/entry.sats
@@ -1,5 +1,3 @@
-#define ENTRY_COUNT 512
-
staload "kernel/memory/frame.sats"
staload "lib/SATS/valid.sats"
diff --git a/kernel/memory/paging/table.dats b/kernel/memory/paging/table.dats
new file mode 100644
index 0000000..d5029a7
--- /dev/null
+++ b/kernel/memory/paging/table.dats
@@ -0,0 +1,68 @@
+#define ATS_DYNLOADFLAG 0
+
+#include "kernel/prelude/kernel_prelude.hats"
+
+staload "./table.sats"
+staload "./entry.sats"
+
+staload "lib/SATS/valid.sats"
+staload "lib/DATS/valid.dats"
+
+// Calculate table address of next table based on current table and index: P4->P3->P2->P1
+fn next_table_address{l:agz}(pf : !table_t@l | p : ptr(l), index : sizeLt(ENTRY_COUNT)) : Valid Ptr1 = let
+ val entry = p->[index]
+in
+ if (contains_flag(entry,PRESENT) && ~contains_flag(entry,HUGE_PAGE)) then
+ let
+ val next = g1ofg0((Ptr1_to_sz(p) << 9) lor (index << 12))
+ in
+ if next > i2sz(0) then
+ create_valid(sz_to_Ptr1(next))
+ else
+ create_unvalid()
+ end
+ else
+ create_unvalid()
+end
+
+implement set_zero(table) = let
+ implement array_foreach$fwork<entry_t><void>(entry,v) = set_unused(entry)
+in
+ assertloc(array_foreach<entry_t>(table,i2sz(ENTRY_COUNT)) = i2sz(ENTRY_COUNT))
+end
+
+// UNSAFE: unsafe block
+local
+
+ staload UN = "prelude/SATS/unsafe.sats"
+
+in
+
+ implement get_P4() = @{
+ ptr = $UN.ptr_vtake(ulint_to_Ptr1(0xfffffffffffff000ul)),
+ level = (0u | )
+ }
+
+ implement next_table(tp,index) = let
+ val k = tp.ptr
+ val next = next_table_address(k.0 | k.2,index)
+ prval () = k.1 k.0
+ in
+ if is_valid(next) then
+ let
+ val p = unwrap_valid(next)
+ in
+ create_valid(@{
+ ptr = $UN.ptr_vtake(p),
+ level = ( succ(tp.level.0) | )
+ })
+ end
+ else
+ let
+ val () = destroy_unvalid(next)
+ in
+ create_unvalid()
+ end
+ end
+
+end
diff --git a/kernel/memory/paging/table.sats b/kernel/memory/paging/table.sats
new file mode 100644
index 0000000..9a103f4
--- /dev/null
+++ b/kernel/memory/paging/table.sats
@@ -0,0 +1,19 @@
+#define ENTRY_COUNT 512
+
+staload "./entry.sats"
+staload "lib/SATS/valid.sats"
+staload "lib/DATS/valid.dats"
+
+typedef table_t = @[entry_t][ENTRY_COUNT]
+
+vtypedef tablePtr1(c : int) = @{
+ ptr = [l : agz] (table_t@l , table_t@l -<lin,prf> void | ptr l),
+ level = (uint c | )
+ }
+
+// Set all the entries in the table to unused (zero)
+fn set_zero(&table_t): void
+
+fun get_P4() : tablePtr1(0)
+
+fun next_table{n : nat | n < 3}(tablePtr1(n), sizeLt(ENTRY_COUNT)) : Valid (tablePtr1(n+1))
diff --git a/kernel/prelude/SATS/safe_casts.sats b/kernel/prelude/SATS/safe_casts.sats
index f8e34db..8850ef6 100644
--- a/kernel/prelude/SATS/safe_casts.sats
+++ b/kernel/prelude/SATS/safe_casts.sats
@@ -1,4 +1,12 @@
castfn u2u64{v : nat | v <= 0xFFFFFFFFFFFFFFFF}(v : uint v) :<> uint64 v
-castfn u64_to_Ptr0(v : uint64):<> Ptr0
castfn Ptr0_to_u64(v : Ptr0):<> uint64
+
+castfn u64_to_Ptr1{n:nat | n > 0}(v : uint64 n):<> Ptr1
+castfn ulint_to_Ptr1{n: nat | n > 0}(v : ulint n):<> Ptr1
+castfn sz_to_Ptr1{n : nat | n > 0}(v : size_t n):<> Ptr1
+
+castfn u64_to_Ptr0(v : uint64):<> Ptr0
+castfn ulint_to_Ptr0(v : ulint):<> Ptr0
castfn sz_to_Ptr0(v : size_t):<> Ptr0
+
+castfn Ptr1_to_sz(v : Ptr1):<> sizeGte(1)