// Handle an EPT-violation VM exit: walk the current EPT hierarchy down to the
// 4 KiB PTE for the faulting guest-physical page, and if that page is
// registered in mm::ListHead as a hook (mm::HOOK_INFO), flip the PTE between
// an execute-only mapping of Physical_2 and a read/write-only mapping of
// Physical_1 depending on the access type, then invalidate EPT mappings.
// This is the classic EPT shadow-hook technique: execute fetches see one page
// frame, data reads/writes see another.
if (vmexit_reason == VMX_EXIT_REASON_EPT_VIOLATION) {
    // Exit qualification describes the violation (access type, translation info).
    vmx_exit_qualification_ept_violation QualificationEptViolation = { 0 };
    __vmx_vmread(VMCS_EXIT_QUALIFICATION, (size_t*)&QualificationEptViolation);

    auto Physical = 0ui64;
    if (QualificationEptViolation.caused_by_translation)
        // Violation occurred during a guest linear->physical translation:
        // the faulting guest-physical address is valid in the VMCS.
        __vmx_vmread(VMCS_GUEST_PHYSICAL_ADDRESS, (size_t*)&Physical);
    else
        // NOTE(review): this branch reads the guest LINEAR address but the
        // value is then used below as a physical address for the EPT walk
        // and the hook lookup — confirm this is intentional.
        __vmx_vmread(VMCS_EXIT_GUEST_LINEAR_ADDRESS, (size_t*)&Physical);

    // Current EPT pointer (EPTP) gives the PFN of the PML4 table root.
    ept_pointer ept = { 0 };
    __vmx_vmread(VMCS_CTRL_EPT_POINTER, (size_t*)&ept);

    // Decompose the faulting address into per-level table indices
    // (pml4_index / pdpt_index / pd_index / pt_index).
    mm::phys_addr_t phy = { Physical };

    // Walk EPT: PML4 -> PDPT -> PD. Each level's PFN is shifted by 12 to get
    // the table's physical address, which mm::map_page makes accessible.
    // NOTE(review): no corresponding unmap is visible in this chunk — verify
    // mm::map_page with map_src does not need explicit release.
    auto pml4 = (ept_pml4e*)mm::map_page(ept.page_frame_number << 12, mm::map_type_t::map_src);
    auto pdpte = (ept_pdpte*)mm::map_page(pml4[phy.pml4_index].page_frame_number << 12, mm::map_type_t::map_src);
    auto pde2mb = (epde_2mb*)mm::map_page(pdpte[phy.pdpt_index].page_frame_number << 12, mm::map_type_t::map_src);

    // Only 4 KiB-mapped regions are handled; if the PDE is a 2 MiB large
    // page there is no PT level to edit and the violation falls through
    // unhandled here. NOTE(review): confirm large pages are pre-split before
    // hooks are installed, otherwise this silently does nothing.
    if (!pde2mb[phy.pd_index].large_page) {
        // Not a large page, so reinterpret the entry as a normal PDE and map
        // the page table it references.
        auto pte = (ept_pte*)mm::map_page(((ept_pde*)pde2mb)[phy.pd_index].page_frame_number << 12, mm::map_type_t::map_src);
        if (pte) {
            // PFNs of the two backing frames for a hooked page:
            // pfn_1 = original (read/write view), pfn_2 = shadow (execute view).
            auto pfn_1 = 0ui64;
            auto pfn_2 = 0ui64;

            // Search the hook list for an entry whose Physical_1 matches the
            // faulting page (page-aligned compare). List traversal is guarded
            // by mm::SpinLock; only the lookup is done under the lock.
            SpinlockLock(&mm::SpinLock);
            PLIST_ENTRY Entry = mm::ListHead.Flink;
            while (Entry != &mm::ListHead) {
                // Save the forward link before using the node.
                PLIST_ENTRY NextEntry = Entry->Flink;
                mm::HOOK_INFO* data = CONTAINING_RECORD(Entry, mm::HOOK_INFO, List);
                // Compare page-aligned addresses (~0xFFF masks the offset;
                // sign extension makes this a full 64-bit page mask).
                if (data->Physical_1 == (Physical & ~0xFFF)) {
                    pfn_1 = data->Physical_1 >> 12;
                    pfn_2 = data->Physical_2 >> 12;
                    break;
                }
                Entry = NextEntry;
            }
            SpinlockUnlock(&mm::SpinLock);

            // Both frames found -> this page is hooked; retarget the PTE.
            // (Zero PFNs mean "not found"; a hook at physical page 0 would be
            // indistinguishable, but that frame is presumably never used.)
            if (pfn_1 && pfn_2) {
                if (QualificationEptViolation.execute_access) {
                    // Instruction fetch faulted: expose the shadow frame as
                    // execute-only so subsequent reads/writes fault again.
                    pte[phy.pt_index].read_access = 0;
                    pte[phy.pt_index].write_access = 0;
                    pte[phy.pt_index].execute_access = 1;
                    pte[phy.pt_index].page_frame_number = pfn_2;
                }
                else
                {
                    // Data access faulted: expose the original frame as
                    // read/write-only so subsequent fetches fault again.
                    pte[phy.pt_index].read_access = 1;
                    pte[phy.pt_index].write_access = 1;
                    pte[phy.pt_index].execute_access = 0;
                    pte[phy.pt_index].page_frame_number = pfn_1;
                }

                // Flush cached EPT translations so the edited PTE takes
                // effect (all-context INVEPT; descriptor contents unused for
                // this invalidation type, hence zeroed).
                invept_descriptor Descriptor = { 0 };
                mm::AsmInvept(invept_all_context, &Descriptor);
                // Hook serviced; skip any fall-through handling below.
                return;
            }
        }
    }
}