Member | Defined in | Attributes
--- | --- | ---
Accessed | VirtualAddressSpace | static
allocateStack() | X64VirtualAddressSpace | virtual |
allocateStack(size_t stackSz) | X64VirtualAddressSpace | virtual |
CacheDisable | VirtualAddressSpace | static |
ClearDirty | VirtualAddressSpace | static |
clone(bool copyOnWrite=true) | X64VirtualAddressSpace | virtual |
conditionalTableEntryAllocation(uint64_t *tableEntry, uint64_t flags) | X64VirtualAddressSpace | private |
conditionalTableEntryMapping(uint64_t *tableEntry, uint64_t physAddress, uint64_t flags) | X64VirtualAddressSpace | private |
CopyOnWrite | VirtualAddressSpace | static |
create() | VirtualAddressSpace | static |
Dirty | VirtualAddressSpace | static |
doAllocateStack(size_t sSize) | X64VirtualAddressSpace | private |
Execute | VirtualAddressSpace | static |
expandHeap(ssize_t incr, size_t flags) | VirtualAddressSpace | virtual |
freeStack(Stack *pStack) | X64VirtualAddressSpace | virtual |
fromFlags(uint64_t Flags, bool bFinal=false) const PURE | X64VirtualAddressSpace | private |
getDynamicEnd() const | X64VirtualAddressSpace | inline virtual
getDynamicLinkerAddress() const | X64VirtualAddressSpace | inline virtual
getDynamicStart() const | X64VirtualAddressSpace | inline virtual
getEndOfHeap() | X64VirtualAddressSpace | virtual |
getGlobalInfoBlock() const | X64VirtualAddressSpace | inline virtual
getKernelAddressSpace() | VirtualAddressSpace | static |
getKernelCacheEnd() const | X64VirtualAddressSpace | inline virtual
getKernelCacheStart() const | X64VirtualAddressSpace | inline virtual
getKernelEventBlockStart() const | X64VirtualAddressSpace | inline virtual
getKernelHeapEnd() const | X64VirtualAddressSpace | inline virtual
getKernelHeapStart() const | X64VirtualAddressSpace | inline virtual
getKernelModulesEnd() const | X64VirtualAddressSpace | inline virtual
getKernelModulesStart() const | X64VirtualAddressSpace | inline virtual
getKernelStart() const | X64VirtualAddressSpace | inline virtual
getMapping(void *virtualAddress, physical_uintptr_t &physAddress, size_t &flags) | X64VirtualAddressSpace | virtual |
getPageTableEntry(void *virtualAddress, uint64_t *&pageTableEntry) const | X64VirtualAddressSpace | private |
getUserReservedStart() const | X64VirtualAddressSpace | inline virtual
getUserStart() const | X64VirtualAddressSpace | inline virtual
Guarded | VirtualAddressSpace | static |
isAddressValid(void *virtualAddress) | X64VirtualAddressSpace | virtual |
isMapped(void *virtualAddress) | X64VirtualAddressSpace | virtual |
KernelMode | VirtualAddressSpace | static |
m_bKernelSpace | X64VirtualAddressSpace | private |
m_freeStacks | X64VirtualAddressSpace | private |
m_Heap | VirtualAddressSpace |
m_HeapEnd | VirtualAddressSpace |
m_KernelSpace | X64VirtualAddressSpace | private static
m_Lock | X64VirtualAddressSpace | private |
m_PhysicalPML4 | X64VirtualAddressSpace | private |
m_pStackTop | X64VirtualAddressSpace | private |
m_StacksLock | X64VirtualAddressSpace | private |
m_ZeroPage | VirtualAddressSpace | static |
map(physical_uintptr_t physAddress, void *virtualAddress, size_t flags) | X64VirtualAddressSpace | virtual |
mapHuge(physical_uintptr_t physAddress, void *virtualAddress, size_t count, size_t flags) | X64VirtualAddressSpace | virtual |
mapPageStructures(physical_uintptr_t physAddress, void *virtualAddress, size_t flags) | X64VirtualAddressSpace | |
mapPageStructuresAbove4GB(physical_uintptr_t physAddress, void *virtualAddress, size_t flags) | X64VirtualAddressSpace |
mapUnlocked(physical_uintptr_t physAddress, void *virtualAddress, size_t flags, bool locked=false) | X64VirtualAddressSpace | private |
maybeFreeTables(void *virtualAddress) | X64VirtualAddressSpace | private |
memIsInHeap(void *pMem) | X64VirtualAddressSpace | virtual |
memIsInKernelHeap(void *pMem) | X64VirtualAddressSpace | virtual |
MemoryCoherent | VirtualAddressSpace | static |
Multiprocessor class | X64VirtualAddressSpace | friend |
operator=(const X64VirtualAddressSpace &) | X64VirtualAddressSpace | private |
Processor class | X64VirtualAddressSpace | friend |
revertToKernelAddressSpace() | X64VirtualAddressSpace | virtual |
setFlags(void *virtualAddress, size_t newFlags) | X64VirtualAddressSpace | virtual |
setHeap(void *heap, void *heapEnd) | VirtualAddressSpace | inline |
Shared | VirtualAddressSpace | static |
Swapped | VirtualAddressSpace | static |
toFlags(size_t flags, bool bFinal=false) const PURE | X64VirtualAddressSpace | private |
unmap(void *virtualAddress) | X64VirtualAddressSpace | virtual |
unmapUnlocked(void *virtualAddress, bool requireMapped=true) | X64VirtualAddressSpace | private |
VirtualAddressSpace(void *Heap) | VirtualAddressSpace | inline protected
VirtualAddressSpace::create() | X64VirtualAddressSpace | friend |
VirtualAddressSpace::getKernelAddressSpace() | X64VirtualAddressSpace | friend |
Write | VirtualAddressSpace | static |
WriteCombine | VirtualAddressSpace | static |
WriteThrough | VirtualAddressSpace | static |
X64VirtualAddressSpace() | X64VirtualAddressSpace | private |
X64VirtualAddressSpace(void *Heap, physical_uintptr_t PhysicalPML4, void *VirtualStack) | X64VirtualAddressSpace | private |
X64VirtualAddressSpace(const X64VirtualAddressSpace &) | X64VirtualAddressSpace | private |
~VirtualAddressSpace() | VirtualAddressSpace | inline virtual
~X64VirtualAddressSpace() | X64VirtualAddressSpace | virtual |
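
The members above combine into a simple map/inspect/unmap workflow. The sketch below is illustrative only and uses just the interface listed here; the include path, the `bool` return from `map()`, and the OR-combination of the static flag constants (`KernelMode`, `Write`) are assumptions not confirmed by this page, and `remapExample()` is a hypothetical helper, not a member of the class.

```cpp
// Minimal usage sketch -- illustrative only, not part of the class.
// Assumptions: the include path below, a bool return from map(), and
// OR-combinable static flag constants; only members listed above are used.
#include <processor/VirtualAddressSpace.h>  // assumed header location

// Hypothetical helper: map one physical frame at 'virt' in the kernel
// address space, inspect the mapping, tighten its flags, then tear it down.
static bool remapExample(physical_uintptr_t frame, void *virt)
{
    VirtualAddressSpace &vas = VirtualAddressSpace::getKernelAddressSpace();

    // map() installs a translation; assumed to return false on failure.
    if (!vas.map(frame, virt,
                 VirtualAddressSpace::KernelMode | VirtualAddressSpace::Write))
        return false;

    // getMapping() reports the backing frame and flags via out-parameters.
    physical_uintptr_t phys = 0;
    size_t flags = 0;
    vas.getMapping(virt, phys, flags);

    // Drop Write again, e.g. after populating the page.
    vas.setFlags(virt, flags & ~VirtualAddressSpace::Write);

    bool ok = vas.isMapped(virt) && (phys == frame);

    // unmap() removes the translation for this page.
    vas.unmap(virt);
    return ok;
}
```

The private `mapUnlocked()`/`unmapUnlocked()` entries and `m_Lock` listed above suggest that the public `map()`/`unmap()` acquire the lock and delegate to unlocked internals (with `maybeFreeTables()` reclaiming empty page tables); callers only ever use the public, locking entry points shown here.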