xref: /haiku/src/system/kernel/arch/arm64/VMSAv8TranslationMap.cpp (revision a25542e7ef4b7c96878d0a7778d63f03bdcb60da)
/*
 * Copyright 2022 Haiku, Inc. All Rights Reserved.
 * Distributed under the terms of the MIT License.
 */
#include "VMSAv8TranslationMap.h"

#include <util/AutoLock.h>
#include <util/ThreadAutoLock.h>
#include <vm/vm_page.h>
#include <vm/vm_priv.h>


static constexpr uint64_t kPteAddrMask = (((1UL << 36) - 1) << 12);
static constexpr uint64_t kPteAttrMask = ~(kPteAddrMask | 0x3);

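// Page/block descriptor attribute bits of the VMSAv8-64 translation table format
// that this code cares about. kAttrSWDBM is one of the software-defined bits
// (bit 55), used here to remember that a mapping is logically writable for the
// software dirty-bit scheme.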
static constexpr uint64_t kAttrSWDBM = (1UL << 55);
static constexpr uint64_t kAttrUXN = (1UL << 54);
static constexpr uint64_t kAttrPXN = (1UL << 53);
static constexpr uint64_t kAttrDBM = (1UL << 51);
static constexpr uint64_t kAttrNG = (1UL << 11);
static constexpr uint64_t kAttrAF = (1UL << 10);
static constexpr uint64_t kAttrSH1 = (1UL << 9);
static constexpr uint64_t kAttrSH0 = (1UL << 8);
static constexpr uint64_t kAttrAP2 = (1UL << 7);
static constexpr uint64_t kAttrAP1 = (1UL << 6);

uint32_t VMSAv8TranslationMap::fHwFeature;
uint64_t VMSAv8TranslationMap::fMair;


VMSAv8TranslationMap::VMSAv8TranslationMap(
	bool kernel, phys_addr_t pageTable, int pageBits, int vaBits, int minBlockLevel)
	:
	fIsKernel(kernel),
	fPageTable(pageTable),
	fPageBits(pageBits),
	fVaBits(vaBits),
	fMinBlockLevel(minBlockLevel)
{
	dprintf("VMSAv8TranslationMap\n");

	fInitialLevel = CalcStartLevel(fVaBits, fPageBits);
}


VMSAv8TranslationMap::~VMSAv8TranslationMap()
{
	dprintf("~VMSAv8TranslationMap\n");

	// FreeTable(fPageTable, fInitialLevel);
}

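// Computes the initial (highest) translation table level for the given virtual
// address width and page size. Each table level resolves (pageBits - 3) bits,
// so levels are peeled off until all VA bits above the page offset are covered.
// For example (illustration only): 48-bit VAs with 4 KiB pages (pageBits = 12)
// start at level 0, while 39-bit VAs start at level 1.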
int
VMSAv8TranslationMap::CalcStartLevel(int vaBits, int pageBits)
{
	int level = 4;

	int bitsLeft = vaBits - pageBits;
	while (bitsLeft > 0) {
		int tableBits = pageBits - 3;
		bitsLeft -= tableBits;
		level--;
	}

	ASSERT(level >= 0);

	return level;
}


bool
VMSAv8TranslationMap::Lock()
{
	recursive_lock_lock(&fLock);
	return true;
}


void
VMSAv8TranslationMap::Unlock()
{
	if (recursive_lock_get_recursion(&fLock) == 1) {
		// we're about to release it for the last time
		Flush();
	}
	recursive_lock_unlock(&fLock);
}


addr_t
VMSAv8TranslationMap::MappedSize() const
{
	panic("VMSAv8TranslationMap::MappedSize not implemented");
	return 0;
}

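// Returns a worst-case estimate of how many page-table pages may have to be
// allocated to map the range [start, end]: for each level, one table per entry
// the range can span at the level above, plus two for the partially covered
// entries at either end.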
size_t
VMSAv8TranslationMap::MaxPagesNeededToMap(addr_t start, addr_t end) const
{
	size_t result = 0;
	size_t size = end - start + 1;

	for (int i = fInitialLevel; i < 3; i++) {
		int tableBits = fPageBits - 3;
		int shift = tableBits * (3 - i) + fPageBits;
		uint64_t entrySize = 1UL << shift;

		result += size / entrySize + 2;
	}

	return result;
}

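// Returns a kernel-virtual pointer to the page table at physical address pa,
// using the kernel's physical map window at KERNEL_PMAP_BASE.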
uint64_t*
VMSAv8TranslationMap::TableFromPa(phys_addr_t pa)
{
	return reinterpret_cast<uint64_t*>(KERNEL_PMAP_BASE + pa);
}

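// Builds a block descriptor (levels 0-2) or a page descriptor (level 3) for pa
// with the given attribute bits; the low two bits select the descriptor type.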
uint64_t
VMSAv8TranslationMap::MakeBlock(phys_addr_t pa, int level, uint64_t attr)
{
	ASSERT(level >= fMinBlockLevel && level < 4);

	return pa | attr | (level == 3 ? 0x3 : 0x1);
}

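// Recursively frees the page table at ptPa, including any next-level tables
// referenced by its entries, and returns the backing pages to the free list.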
void
VMSAv8TranslationMap::FreeTable(phys_addr_t ptPa, int level)
{
	ASSERT(level < 3);

	if (level + 1 < 3) {
		int tableBits = fPageBits - 3;
		uint64_t tableSize = 1UL << tableBits;

		uint64_t* pt = TableFromPa(ptPa);
		for (uint64_t i = 0; i < tableSize; i++) {
			uint64_t pte = pt[i];
			if ((pte & 0x3) == 0x3) {
				FreeTable(pte & kPteAddrMask, level + 1);
			}
		}
	}

	vm_page* page = vm_lookup_page(ptPa >> fPageBits);
	vm_page_set_state(page, PAGE_STATE_FREE);
}

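// Makes sure a next-level table exists behind entry "index" of the table at
// ptPa and returns its physical address. An existing block mapping is split
// into an equivalent table of smaller blocks/pages. Returns 0 at level 3
// (there is no next level) or when the entry is not a table and no reservation
// was supplied to allocate one.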
phys_addr_t
VMSAv8TranslationMap::MakeTable(
	phys_addr_t ptPa, int level, int index, vm_page_reservation* reservation)
{
	if (level == 3)
		return 0;

	uint64_t* pte = &TableFromPa(ptPa)[index];
	vm_page* page = NULL;

retry:
	uint64_t oldPte = atomic_get64((int64*) pte);

	int type = oldPte & 0x3;
	if (type == 0x3) {
		return oldPte & kPteAddrMask;
	} else if (reservation != NULL) {
		if (page == NULL)
			page = vm_page_allocate_page(reservation, PAGE_STATE_WIRED | VM_PAGE_ALLOC_CLEAR);
		phys_addr_t newTablePa = page->physical_page_number << fPageBits;

		if (type == 0x1) {
			// If we're replacing an existing block mapping, convert it into a page table
			int tableBits = fPageBits - 3;
			int shift = tableBits * (3 - (level + 1)) + fPageBits;
			uint64_t entrySize = 1UL << shift;
			uint64_t tableSize = 1UL << tableBits;

			uint64_t* newTable = TableFromPa(newTablePa);
			uint64_t addr = oldPte & kPteAddrMask;
			uint64_t attr = oldPte & kPteAttrMask;

			for (uint64_t i = 0; i < tableSize; i++) {
				newTable[i] = MakeBlock(addr + i * entrySize, level + 1, attr);
			}
		}

		asm("dsb ish");

		// FIXME: this is not enough on real hardware with SMP
		if ((uint64_t) atomic_test_and_set64((int64*) pte, newTablePa | 0x3, oldPte) != oldPte)
			goto retry;

		return newTablePa;
	}

	return 0;
}

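// MapRange is the common worker behind Map(), Unmap(), UnmapPage(), Protect()
// and ClearFlags(). It walks [va, va + size) in the table at ptPa (whose
// entries cover "level"), applying the requested VMAction. Suitably aligned
// chunks are handled with block/page descriptors at this level; everything
// else recurses into the next-level table obtained from MakeTable().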
void
VMSAv8TranslationMap::MapRange(phys_addr_t ptPa, int level, addr_t va, phys_addr_t pa, size_t size,
	VMSAv8TranslationMap::VMAction action, uint64_t attr, vm_page_reservation* reservation)
{
	ASSERT(level < 4);
	ASSERT(ptPa != 0);
	ASSERT(reservation != NULL || action != VMAction::MAP);

	int tableBits = fPageBits - 3;
	uint64_t tableMask = (1UL << tableBits) - 1;

	int shift = tableBits * (3 - level) + fPageBits;
	uint64_t entrySize = 1UL << shift;

	uint64_t entryMask = entrySize - 1;
	uint64_t nextVa = va;
	uint64_t end = va + size;
	int index;

	// Handle a misaligned head that extends up to the next entry boundary, in the next-level table
	if ((va & entryMask) != 0) {
		uint64_t aligned = (va & ~entryMask) + entrySize;
		if (end > aligned) {
			index = (va >> shift) & tableMask;
			phys_addr_t table = MakeTable(ptPa, level, index, reservation);
			MapRange(table, level + 1, va, pa, aligned - va, action, attr, reservation);
			nextVa = aligned;
		}
	}

	// Handle fully aligned and appropriately sized chunks
	while (nextVa + entrySize <= end) {
		phys_addr_t targetPa = pa + (nextVa - va);
		index = (nextVa >> shift) & tableMask;

		bool blockAllowed = false;
		if (action == VMAction::MAP)
			blockAllowed = (level >= fMinBlockLevel && (targetPa & entryMask) == 0);
		if (action == VMAction::SET_ATTR || action == VMAction::CLEAR_FLAGS)
			blockAllowed = (MakeTable(ptPa, level, index, NULL) == 0);
		if (action == VMAction::UNMAP)
			blockAllowed = true;

		if (blockAllowed) {
			// Everything is aligned, so we can create a block mapping here
			uint64_t* pte = &TableFromPa(ptPa)[index];

		retry:
			uint64_t oldPte = atomic_get64((int64*) pte);

			if (action == VMAction::MAP || (oldPte & 0x1) != 0) {
				uint64_t newPte = 0;
				if (action == VMAction::MAP) {
					newPte = MakeBlock(targetPa, level, attr);
				} else if (action == VMAction::SET_ATTR) {
					newPte = MakeBlock(oldPte & kPteAddrMask, level, MoveAttrFlags(attr, oldPte));
				} else if (action == VMAction::CLEAR_FLAGS) {
					newPte = MakeBlock(oldPte & kPteAddrMask, level, ClearAttrFlags(oldPte, attr));
				} else if (action == VMAction::UNMAP) {
					newPte = 0;
					tmp_pte = oldPte;
				}

				// FIXME: this might not be enough on real hardware with SMP for some cases
				if ((uint64_t) atomic_test_and_set64((int64*) pte, newPte, oldPte) != oldPte)
					goto retry;

				if (level < 3 && (oldPte & 0x3) == 0x3) {
					// If we're replacing an existing page table, clean it up
					FreeTable(oldPte & kPteAddrMask, level);
				}
			}
		} else {
			// Otherwise handle mapping in next-level table
			phys_addr_t table = MakeTable(ptPa, level, index, reservation);
			MapRange(table, level + 1, nextVa, targetPa, entrySize, action, attr, reservation);
		}
		nextVa += entrySize;
	}

	// Handle a misaligned tail (or the entirety of a small range) in the next-level table
	if (nextVa < end) {
		index = (nextVa >> shift) & tableMask;
		phys_addr_t table = MakeTable(ptPa, level, index, reservation);
		MapRange(
			table, level + 1, nextVa, pa + (nextVa - va), end - nextVa, action, attr, reservation);
	}
}

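// Returns the index of the MAIR_EL1 slot that holds the given memory attribute
// encoding, so it can be referenced from the AttrIndx field of a descriptor.
// Panics if the attribute is not present in the cached MAIR value (fMair).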
uint8_t
VMSAv8TranslationMap::MairIndex(uint8_t type)
{
	for (int i = 0; i < 8; i++)
		if (((fMair >> (i * 8)) & 0xff) == type)
			return i;

	panic("MAIR entry not found");
	return 0;
}

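// Translates the generic PAGE_ACCESSED/PAGE_MODIFIED flags into PTE attribute
// changes: clearing "accessed" clears the AF bit, while clearing "modified"
// sets AP[2] (write protect) again on mappings that take part in the software
// dirty-bit scheme, so the next write can be detected.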
uint64_t
VMSAv8TranslationMap::ClearAttrFlags(uint64_t attr, uint32 flags)
{
	attr &= kPteAttrMask;

	if ((flags & PAGE_ACCESSED) != 0)
		attr &= ~kAttrAF;

	if ((flags & PAGE_MODIFIED) != 0 && (attr & kAttrSWDBM) != 0)
		attr |= kAttrAP2;

	return attr;
}

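// Carries the accessed (AF set) and dirty (AP[2] cleared) state of an existing
// PTE over into a freshly computed attribute set, so changing protections does
// not lose page state.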
uint64_t
VMSAv8TranslationMap::MoveAttrFlags(uint64_t newAttr, uint64_t oldAttr)
{
	if ((oldAttr & kAttrAF) != 0)
		newAttr |= kAttrAF;
	if (((newAttr & oldAttr) & kAttrSWDBM) != 0 && (oldAttr & kAttrAP2) == 0)
		newAttr &= ~kAttrAP2;

	return newAttr;
}

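// Builds the descriptor attribute bits for a mapping from the generic
// B_*_AREA protection flags. Writable mappings start out read-only with
// kAttrSWDBM set and only become writable once dirtied (or transparently via
// hardware DBM when available). Note that memoryType is not used yet: all
// mappings are inner-shareable normal write-back memory.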
uint64_t
VMSAv8TranslationMap::GetMemoryAttr(uint32 attributes, uint32 memoryType, bool isKernel)
{
	uint64_t attr = 0;

	if (!isKernel)
		attr |= kAttrNG;

	if ((attributes & B_EXECUTE_AREA) == 0)
		attr |= kAttrUXN;
	if ((attributes & B_KERNEL_EXECUTE_AREA) == 0)
		attr |= kAttrPXN;

	if ((attributes & B_READ_AREA) == 0) {
		attr |= kAttrAP2;
		if ((attributes & B_KERNEL_WRITE_AREA) != 0)
			attr |= kAttrSWDBM;
	} else {
		attr |= kAttrAP2 | kAttrAP1;
		if ((attributes & B_WRITE_AREA) != 0)
			attr |= kAttrSWDBM;
	}

	if ((fHwFeature & HW_DIRTY) != 0 && (attr & kAttrSWDBM))
		attr |= kAttrDBM;

	attr |= kAttrSH1 | kAttrSH0;

	attr |= MairIndex(MAIR_NORMAL_WB) << 2;

	return attr;
}

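// Maps a single page at va to pa. The root table is allocated lazily from the
// caller's reservation the first time something is mapped.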
status_t
VMSAv8TranslationMap::Map(addr_t va, phys_addr_t pa, uint32 attributes, uint32 memoryType,
	vm_page_reservation* reservation)
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	uint64_t pageMask = (1UL << fPageBits) - 1;
	uint64_t vaMask = (1UL << fVaBits) - 1;

	ASSERT((va & pageMask) == 0);
	ASSERT((pa & pageMask) == 0);
	ASSERT(ValidateVa(va));

	uint64_t attr = GetMemoryAttr(attributes, memoryType, fIsKernel);

	if (!fPageTable) {
		vm_page* page = vm_page_allocate_page(reservation, PAGE_STATE_WIRED | VM_PAGE_ALLOC_CLEAR);
		fPageTable = page->physical_page_number << fPageBits;
	}

	MapRange(
		fPageTable, fInitialLevel, va & vaMask, pa, B_PAGE_SIZE, VMAction::MAP, attr, reservation);

	return B_OK;
}


status_t
VMSAv8TranslationMap::Unmap(addr_t start, addr_t end)
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	size_t size = end - start + 1;

	uint64_t pageMask = (1UL << fPageBits) - 1;
	uint64_t vaMask = (1UL << fVaBits) - 1;

	ASSERT((start & pageMask) == 0);
	ASSERT((size & pageMask) == 0);
	ASSERT(ValidateVa(start));

	MapRange(fPageTable, fInitialLevel, start & vaMask, 0, size, VMAction::UNMAP, 0, NULL);

	return B_OK;
}

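// Unmaps a single page and updates the accessed/modified bookkeeping of the
// owning cache. The kludge mentioned below: MapRange's UNMAP action stashes
// the old PTE in tmp_pte, which is then consulted here for the AF and AP[2]
// state passed on to PageUnmapped().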
status_t
VMSAv8TranslationMap::UnmapPage(VMArea* area, addr_t address, bool updatePageQueue)
{
	ThreadCPUPinner pinner(thread_get_current_thread());
	RecursiveLocker locker(fLock);

	// TODO: replace this kludge

	phys_addr_t pa;
	uint64_t pte;
	if (!WalkTable(fPageTable, fInitialLevel, address, &pa, &pte))
		return B_ENTRY_NOT_FOUND;

	uint64_t vaMask = (1UL << fVaBits) - 1;
	MapRange(fPageTable, fInitialLevel, address & vaMask, 0, B_PAGE_SIZE, VMAction::UNMAP, 0, NULL);

	pinner.Unlock();
	locker.Detach();
	PageUnmapped(area, pa >> fPageBits, (tmp_pte & kAttrAF) != 0, (tmp_pte & kAttrAP2) == 0,
		updatePageQueue);

	return B_OK;
}

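// Looks up va in the table hierarchy rooted at ptPa (whose entries cover
// "level"). On success returns true and reports the translated physical
// address and the leaf descriptor (block or page) through pa and rpte.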
bool
VMSAv8TranslationMap::WalkTable(
	phys_addr_t ptPa, int level, addr_t va, phys_addr_t* pa, uint64_t* rpte)
{
	int tableBits = fPageBits - 3;
	uint64_t tableMask = (1UL << tableBits) - 1;

	int shift = tableBits * (3 - level) + fPageBits;
	uint64_t entrySize = 1UL << shift;
	uint64_t entryMask = entrySize - 1;

	int index = (va >> shift) & tableMask;

	uint64_t pte = TableFromPa(ptPa)[index];
	int type = pte & 0x3;

	if ((type & 0x1) == 0)
		return false;

	uint64_t addr = pte & kPteAddrMask;
	if (level < 3) {
		if (type == 0x3) {
			return WalkTable(addr, level + 1, va, pa, rpte);
		} else {
			*pa = addr | (va & entryMask);
			*rpte = pte;
		}
	} else {
		ASSERT(type == 0x3);
		*pa = addr;
		*rpte = pte;
	}

	return true;
}

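// Checks that va belongs to the address-space half this map manages: kernel
// maps expect the upper (TTBR1) range with the bits above fVaBits all set,
// user maps expect the lower (TTBR0) range with those bits clear.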
bool
VMSAv8TranslationMap::ValidateVa(addr_t va)
{
	uint64_t vaMask = (1UL << fVaBits) - 1;
	bool kernelAddr = (va & (1UL << 63)) != 0;
	if (kernelAddr != fIsKernel)
		return false;
	if ((va & ~vaMask) != (fIsKernel ? ~vaMask : 0))
		return false;
	return true;
}

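// Translates va and derives the generic PAGE_* and B_*_AREA flags back from
// the descriptor bits; AP[2] == 0 is interpreted as "modified" under the
// software dirty-bit scheme.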
status_t
VMSAv8TranslationMap::Query(addr_t va, phys_addr_t* pa, uint32* flags)
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	ASSERT(ValidateVa(va));

	uint64_t pte = 0;
	bool ret = WalkTable(fPageTable, fInitialLevel, va, pa, &pte);

	uint32 result = 0;

	if (ret) {
		result |= PAGE_PRESENT;

		if ((pte & kAttrAF) != 0)
			result |= PAGE_ACCESSED;
		if ((pte & kAttrAP2) == 0)
			result |= PAGE_MODIFIED;

		if ((pte & kAttrUXN) == 0)
			result |= B_EXECUTE_AREA;
		if ((pte & kAttrPXN) == 0)
			result |= B_KERNEL_EXECUTE_AREA;

		result |= B_KERNEL_READ_AREA;

		if ((pte & kAttrAP1) != 0)
			result |= B_READ_AREA;

		if ((pte & kAttrAP2) == 0 || (pte & kAttrSWDBM) != 0) {
			result |= B_KERNEL_WRITE_AREA;

			if ((pte & kAttrAP1) != 0)
				result |= B_WRITE_AREA;
		}
	}

	*flags = result;
	return B_OK;
}


status_t
VMSAv8TranslationMap::QueryInterrupt(
	addr_t virtualAddress, phys_addr_t* _physicalAddress, uint32* _flags)
{
	return Query(virtualAddress, _physicalAddress, _flags);
}


status_t
VMSAv8TranslationMap::Protect(addr_t start, addr_t end, uint32 attributes, uint32 memoryType)
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	size_t size = end - start + 1;

	uint64_t pageMask = (1UL << fPageBits) - 1;
	uint64_t vaMask = (1UL << fVaBits) - 1;

	ASSERT((start & pageMask) == 0);
	ASSERT((size & pageMask) == 0);
	ASSERT(ValidateVa(start));

	uint64_t attr = GetMemoryAttr(attributes, memoryType, fIsKernel);
	MapRange(fPageTable, fInitialLevel, start & vaMask, 0, size, VMAction::SET_ATTR, attr, NULL);

	return B_OK;
}


status_t
VMSAv8TranslationMap::ClearFlags(addr_t va, uint32 flags)
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	uint64_t pageMask = (1UL << fPageBits) - 1;
	uint64_t vaMask = (1UL << fVaBits) - 1;

	ASSERT((va & pageMask) == 0);
	ASSERT(ValidateVa(va));

	MapRange(
		fPageTable, fInitialLevel, va & vaMask, 0, B_PAGE_SIZE, VMAction::CLEAR_FLAGS, flags, NULL);

	return B_OK;
}

bool
VMSAv8TranslationMap::ClearAccessedAndModified(
	VMArea* area, addr_t address, bool unmapIfUnaccessed, bool& _modified)
{
	panic("VMSAv8TranslationMap::ClearAccessedAndModified not implemented\n");
	return false;
}


void
VMSAv8TranslationMap::Flush()
{
	ThreadCPUPinner pinner(thread_get_current_thread());

	arch_cpu_global_TLB_invalidate();
}