# Size in bytes of a single ARM64 translation table entry (a 64-bit descriptor).
ARM64_TTE_SIZE = 8
# Number of virtual-address bits translated by the ARM64 page tables here.
ARM64_VMADDR_BITS = 48

def PmapBlockOffsetMaskARM64(level):
    """Return the mask covering the VA offset bits within a block mapping
    at the given translation level (1-3)."""
    assert level >= 1 and level <= 3
    page_size = kern.globals.page_size
    entries_per_table = (page_size / ARM64_TTE_SIZE)
    # Bytes of VA spanned by one entry at this level; the offset mask is
    # that span minus one.
    block_span = page_size * (entries_per_table ** (3 - level))
    return block_span - 1
+
def PmapBlockBaseMaskARM64(level):
    """Return the mask selecting the base-address bits of a block mapping
    at the given translation level (1-3): all VMADDR bits except the
    block-offset bits for that level.
    """
    assert level >= 1 and level <= 3
    # Fix: dropped the unused "page_size = kern.globals.page_size" local —
    # this helper needs only the offset mask from its sibling.
    return ((1 << ARM64_VMADDR_BITS) - 1) & ~PmapBlockOffsetMaskARM64(level)
+
def PmapIndexMaskARM64(level):
    """Return the mask of VA bits that hold the table index for the given
    translation level (1-3)."""
    assert level >= 1 and level <= 3
    page_size = kern.globals.page_size
    entries = (page_size / ARM64_TTE_SIZE)
    # Index field width is (entries - 1) scaled by the per-entry VA span.
    per_entry_span = entries ** (3 - level)
    return page_size * (per_entry_span * (entries - 1))
+
def PmapIndexDivideARM64(level):
    """Return the VA span (bytes) mapped by one TTE at the given translation
    level (1-3); dividing a masked VA by this value yields the table index."""
    assert level >= 1 and level <= 3
    page_size = kern.globals.page_size
    entries_per_table = (page_size / ARM64_TTE_SIZE)
    levels_below = 3 - level
    return page_size * (entries_per_table ** levels_below)
+
def PmapTTnIndexARM64(vaddr, level):
    """Return the translation-table index for *vaddr* at the given level,
    computed by masking the level's index bits and dividing out the
    per-entry VA span."""
    assert(type(vaddr) in (long, int))
    assert_64bit(vaddr)

    index_bits = vaddr & PmapIndexMaskARM64(level)
    return index_bits // PmapIndexDivideARM64(level)
+
def PmapDecodeTTEARM64(tte, level):
    """Pretty-print the fields of an ARM64 translation table entry.

    tte   -- the raw 64-bit TTE value (python long)
    level -- translation level (int) the entry came from; used only to
             distinguish a table pointer from an L3 page entry.
    Prints the decoded fields to stdout; returns None.
    """
    assert(type(tte) == long)
    assert(type(level) == int)
    assert_64bit(tte)

    # Bit 0: valid. Bit 1: 1 => table/page descriptor, 0 => block.
    if tte & 0x1 == 0x1:
        # Type 0b11 at levels 1-2 is a table pointer; at L3 it is a page
        # entry, which decodes like a block below.
        if (tte & 0x2 == 0x2) and (level != 0x3):
            print "Type = Table pointer."
            print "Table addr = {:#x}.".format(tte & 0xfffffffff000)
            # Hierarchical (next-level) attribute bits in the high word.
            print "PXN = {:#x}.".format((tte >> 59) & 0x1)
            print "XN = {:#x}.".format((tte >> 60) & 0x1)
            print "AP = {:#x}.".format((tte >> 61) & 0x3)
            print "NS = {:#x}".format(tte >> 63)
        else:
            print "Type = Block."
            # Lower attributes (bits 2-11).
            print "AttrIdx = {:#x}.".format((tte >> 2) & 0x7)
            print "NS = {:#x}.".format((tte >> 5) & 0x1)
            print "AP = {:#x}.".format((tte >> 6) & 0x3)
            print "SH = {:#x}.".format((tte >> 8) & 0x3)
            print "AF = {:#x}.".format((tte >> 10) & 0x1)
            print "nG = {:#x}.".format((tte >> 11) & 0x1)
            # Upper attributes (bits 52+).
            print "HINT = {:#x}.".format((tte >> 52) & 0x1)
            print "PXN = {:#x}.".format((tte >> 53) & 0x1)
            print "XN = {:#x}.".format((tte >> 54) & 0x1)
            print "SW Use = {:#x}.".format((tte >> 55) & 0xf)
    else:
        print "Invalid."

    return
+
def PmapWalkARM64(pmap, vaddr, verbose_level = vHUMAN):
    """Walk the ARM64 translation tables of *pmap* for virtual address
    *vaddr* and return the translated physical address, or -1 if no valid
    translation exists.

    pmap          -- lldb value for the pmap (core.cvalue.value)
    vaddr         -- 64-bit virtual address to translate
    verbose_level -- vHUMAN/vSCRIPT/vDETAIL; controls how much is printed
    """
    assert(type(pmap) == core.cvalue.value)
    assert(type(vaddr) in (long, int))
    page_size = kern.globals.page_size
    page_offset_mask = (page_size - 1)
    # Base mask of a 4K/16K page within the 48-bit translated VA range.
    page_base_mask = ((1 << ARM64_VMADDR_BITS) - 1) & (~page_offset_mask)

    assert_64bit(vaddr)
    # -1 is the "no translation" sentinel.
    paddr = -1

    # Precompute the table index at each level for this VA.
    tt1_index = PmapTTnIndexARM64(vaddr, 1)
    tt2_index = PmapTTnIndexARM64(vaddr, 2)
    tt3_index = PmapTTnIndexARM64(vaddr, 3)

    # L1: the root table is embedded in the pmap structure.
    tte = long(unsigned(pmap.tte[tt1_index]))
    assert(type(tte) == long)
    assert_64bit(tte)

    if verbose_level >= vSCRIPT:
        print "L1 entry: {:#x}".format(tte)
    if verbose_level >= vDETAIL:
        PmapDecodeTTEARM64(tte, 1)

    if tte & 0x1 == 0x1:
        # Check for L1 block entry (bit 1 clear => block descriptor).
        if tte & 0x2 == 0x0:
            # Handle L1 block entry: base bits from the TTE, offset from VA.
            paddr = tte & PmapBlockBaseMaskARM64(1)
            paddr = paddr | (vaddr & PmapBlockOffsetMaskARM64(1))
            # NOTE(review): this print is unconditional, unlike the L2/L3
            # block cases which print nothing here — confirm intended.
            print "phys: {:#x}".format(paddr)
        else:
            # Handle L1 table entry: compute the address of the L2 TTE.
            l2_phys = (tte & page_base_mask) + (ARM64_TTE_SIZE * tt2_index)
            assert(type(l2_phys) == long)

            l2_virt = kern.PhysToKernelVirt(l2_phys)
            assert(type(l2_virt) == long)

            if verbose_level >= vDETAIL:
                print "L2 physical address: {:#x}. L2 virtual address: {:#x}".format(l2_phys, l2_virt)

            # L2: read the entry through its kernel-virtual mapping.
            ttep = kern.GetValueFromAddress(l2_virt, "tt_entry_t*")
            tte = long(unsigned(dereference(ttep)))
            assert(type(tte) == long)

            if verbose_level >= vSCRIPT:
                print "L2 entry: {:#0x}".format(tte)
            if verbose_level >= vDETAIL:
                PmapDecodeTTEARM64(tte, 2)

            if tte & 0x1 == 0x1:
                # Check for L2 block entry
                if tte & 0x2 == 0x0:
                    # Handle L2 block entry
                    paddr = tte & PmapBlockBaseMaskARM64(2)
                    paddr = paddr | (vaddr & PmapBlockOffsetMaskARM64(2))
                else:
                    # Handle L2 table entry: compute the address of the L3 TTE.
                    l3_phys = (tte & page_base_mask) + (ARM64_TTE_SIZE * tt3_index)
                    assert(type(l3_phys) == long)

                    l3_virt = kern.PhysToKernelVirt(l3_phys)
                    assert(type(l3_virt) == long)

                    if verbose_level >= vDETAIL:
                        print "L3 physical address: {:#x}. L3 virtual address: {:#x}".format(l3_phys, l3_virt)

                    # L3: a valid entry here must be a page (type 0b11).
                    ttep = kern.GetValueFromAddress(l3_virt, "tt_entry_t*")
                    tte = long(unsigned(dereference(ttep)))
                    assert(type(tte) == long)

                    if verbose_level >= vSCRIPT:
                        print "L3 entry: {:#0x}".format(tte)
                    if verbose_level >= vDETAIL:
                        PmapDecodeTTEARM64(tte, 3)

                    if tte & 0x3 == 0x3:
                        paddr = tte & page_base_mask
                        paddr = paddr | (vaddr & page_offset_mask)
                    elif verbose_level >= vHUMAN:
                        print "L3 entry invalid: {:#x}\n".format(tte)
            elif verbose_level >= vHUMAN: # tte & 0x1 == 0x1
                print "L2 entry invalid: {:#x}\n".format(tte)
    elif verbose_level >= vHUMAN:
        print "L1 entry invalid: {:#x}\n".format(tte)

    if verbose_level >= vHUMAN:
        # NOTE(review): paddr is initialized to -1, which is truthy, so a
        # failed walk takes this branch and prints "... is -1" rather than
        # "(no translation)" — confirm whether "paddr != -1" was intended.
        if paddr:
            print "Translation of {:#x} is {:#x}.".format(vaddr, paddr)
        else:
            print "(no translation)"

    return paddr
+