1 ; Copyright (c) 2015-2017, Satoshi Tanda. All rights reserved.
2 ; Use of this source code is governed by a MIT-style license that can be
3 ; found in the LICENSE file.
6 ; This module implements all assembler code
13 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
15 ; References to C functions
17 EXTERN VmmVmExitHandler@4 : PROC
18 EXTERN VmmVmxFailureHandler@4 : PROC
19 EXTERN UtilDumpGpRegisters@8 : PROC
21 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; VMX instruction result codes returned to C callers (0 = VMX_OK).
; Mapping follows the flag convention used after vmcall/invept/invvpid below:
; ZF set -> error with a status code available; CF set -> error without one.
28 VMX_ERROR_WITH_STATUS EQU 1 ; returned on the ZF-set path (jz errorWithCode)
29 VMX_ERROR_WITHOUT_STATUS EQU 2 ; returned on the CF-set path (jc errorWithoutCode)
31 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
36 ; Dumps all general purpose registers and a flag register.
; Emits a call to UtilDumpGpRegisters@8 (__stdcall, two dword args in this
; project's register convention: ecx = all_regs, edx = stack_pointer).
; NOTE(review): this excerpt elides several macro lines (presumably the
; pushad/pushfd save sequence, the mov into edx, the matching pops, and ENDM)
; -- confirm against the full source before editing.
37 ASM_DUMP_REGISTERS MACRO
40 mov ecx, esp ; all_regs: pointer to the register block saved on the stack
; 4*9 dwords = 9 saved dwords below this point (presumably pushad's 8 GPRs
; plus pushfd's eflags) -- TODO confirm; yields the pre-dump stack pointer.
42 add edx, 4*9 ; stack_pointer
46 call UtilDumpGpRegisters@8 ; UtilDumpGpRegisters(all_regs, stack_pointer);
53 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
59 ; bool __stdcall AsmInitializeVm(
60 ; _In_ void (*vm_initialization_routine)(_In_ ULONG_PTR, _In_ ULONG_PTR,
62 ; _In_opt_ void *context);
; Calls the supplied initialization routine; if virtualization succeeds the
; guest resumes past the call and returns true, otherwise execution falls
; through to the xor below and returns false.
; NOTE(review): this excerpt elides the argument-push sequence before the
; call, the "return true" path after the nop, and the ENDP line -- confirm
; against the full source.
63 AsmInitializeVm PROC vm_initialization_routine, context
69 ; vm_initialization_routine(esp, asmResumeVm, context)
73 call vm_initialization_routine
77 xor eax, eax ; return false
80 ; This is where the virtualized guest starts to execute after successful
83 nop ; keep this nop for ease of debugging
92 ; void __stdcall AsmVmmEntryPoint();
; VM-exit entry point: saves guest state, calls the C VM-exit handler, then
; either resumes the guest or tears down virtualization (vmxoff path).
; NOTE(review): this excerpt elides the PROC directive, the pushad that saves
; the guest GPRs, the "mov ecx, cr0" reads that precede each "mov edx, ecx"
; below, the exitVm/vmxError labels, and the vmxoff/vmresume instructions --
; confirm against the full source.
94 ; No need to save the flag registers since it is restored from the VMCS at
95 ; the time of vmresume.
99 ; save volatile XMM registers
100 sub esp, 68h ; +8 for alignment
102 mov edx, ecx ; save original CR0
; CR0 mask 0f1h clears bits 1-3 (MP, EM, TS) so SSE instructions do not fault
; while the VMM touches the XMM registers.
103 and cl, 0f1h ; clear MP, EM, TS bits for floating point access
104 mov cr0, ecx ; update CR0
105 movaps xmmword ptr [esp + 0h], xmm0
106 movaps xmmword ptr [esp + 10h], xmm1
107 movaps xmmword ptr [esp + 20h], xmm2
108 movaps xmmword ptr [esp + 30h], xmm3
109 movaps xmmword ptr [esp + 40h], xmm4
110 movaps xmmword ptr [esp + 50h], xmm5
111 mov cr0, edx ; restore the original CR0
; NOTE(review): the push of the guest-context argument before this call is
; elided in this excerpt.
114 call VmmVmExitHandler@4 ; bool vm_continue = VmmVmExitHandler(guest_context);
116 ; restore XMM registers
118 mov edx, ecx ; save original CR0
119 and cl, 0f1h ; clear MP, EM, TS bits for floating point access
120 mov cr0, ecx ; update CR0
121 movaps xmm0, xmmword ptr [esp + 0h]
122 movaps xmm1, xmmword ptr [esp + 10h]
123 movaps xmm2, xmmword ptr [esp + 20h]
124 movaps xmm3, xmmword ptr [esp + 30h]
125 movaps xmm4, xmmword ptr [esp + 40h]
126 movaps xmm5, xmmword ptr [esp + 50h]
127 mov cr0, edx ; restore the original CR0
128 add esp, 68h ; +8 for alignment
; NOTE(review): the test that sets ZF from vm_continue (e.g. "test al, al")
; is elided before this branch in this excerpt.
131 jz exitVm ; if (!vm_continue) jmp exitVm
138 ; Executes vmxoff and ends virtualization
139 ; eax = Guest's eflags
141 ; ecx = Guest's eip for the next instruction
; ZF/CF set after vmxoff-related VMX reads indicates failure; fall through
; on success to restore guest eflags/esp and return to the guest eip.
144 jz vmxError ; if (ZF) jmp
145 jc vmxError ; if (CF) jmp
147 popfd ; eflags <= GuestFlags
148 mov esp, edx ; esp <= GuestEsp
150 ret ; jmp AddressToReturn
153 ; Diagnose a critical error
; NOTE(review): the register-save sequence and the argument push preceding
; this failure call are elided in this excerpt.
156 mov ecx, esp ; all_regs
158 call VmmVmxFailureHandler@4 ; VmmVmxFailureHandler(all_regs);
160 AsmVmmEntryPoint ENDP
162 ; unsigned char __stdcall AsmVmxCall(_In_ ULONG_PTR hypercall_number,
163 ; _In_opt_ void *context);
; Issues a hypercall to the VMM via vmcall and translates the VMX flag
; convention into the VMX_ERROR_* codes (0 = VMX_OK).
; NOTE(review): this excerpt elides the move of context into edx, the
; errorWithCode/errorWithoutCode labels, the ret lines, and ENDP -- confirm
; against the full source.
164 AsmVmxCall PROC hypercall_number, context
165 mov ecx, hypercall_number
167 vmcall ; vmcall(hypercall_number, context)
168 jz errorWithCode ; if (ZF) jmp
169 jc errorWithoutCode ; if (CF) jmp
170 xor eax, eax ; return VMX_OK
174 mov eax, VMX_ERROR_WITHOUT_STATUS
178 mov eax, VMX_ERROR_WITH_STATUS
; ---------------------------------------------------------------------------
; Descriptor-table and segment-selector accessors.
; NOTE(review): every routine below is truncated in this excerpt to its C
; signature comment, the PROC line, and the move of the argument into ecx;
; the privileged instruction each one wraps (lgdt/sgdt, lldt/sldt, ltr/str,
; mov to/from a segment register), the ret, and the ENDP lines are elided.
; Confirm the bodies against the full source before editing.
; ---------------------------------------------------------------------------
182 ; void __stdcall AsmWriteGDT(_In_ const GDTR *gdtr);
183 AsmWriteGDT PROC gdtr
189 ; void __stdcall AsmReadGDT(_Out_ GDTR *gdtr);
196 ; void __stdcall AsmWriteLDTR(_In_ USHORT local_segmeng_selector);
197 AsmWriteLDTR PROC local_segmeng_selector
198 mov ecx, local_segmeng_selector
203 ; USHORT __stdcall AsmReadLDTR();
209 ; void __stdcall AsmWriteTR(_In_ USHORT task_register);
210 AsmWriteTR PROC task_register
211 mov ecx, task_register
216 ; USHORT __stdcall AsmReadTR();
222 ; void __stdcall AsmWriteES(_In_ USHORT segment_selector);
223 AsmWriteES PROC segment_selector
224 mov ecx, segment_selector
229 ; USHORT __stdcall AsmReadES();
235 ; void __stdcall AsmWriteCS(_In_ USHORT segment_selector);
236 AsmWriteCS PROC segment_selector
237 mov ecx, segment_selector
242 ; USHORT __stdcall AsmReadCS();
248 ; void __stdcall AsmWriteSS(_In_ USHORT segment_selector);
249 AsmWriteSS PROC segment_selector
250 mov ecx, segment_selector
255 ; USHORT __stdcall AsmReadSS();
261 ; void __stdcall AsmWriteDS(_In_ USHORT segment_selector);
262 AsmWriteDS PROC segment_selector
263 mov ecx, segment_selector
268 ; USHORT __stdcall AsmReadDS();
274 ; void __stdcall AsmWriteFS(_In_ USHORT segment_selector);
275 AsmWriteFS PROC segment_selector
276 mov ecx, segment_selector
281 ; USHORT __stdcall AsmReadFS();
287 ; void __stdcall AsmWriteGS(_In_ USHORT segment_selector);
288 AsmWriteGS PROC segment_selector
289 mov ecx, segment_selector
294 ; USHORT __stdcall AsmReadGS();
300 ; ULONG_PTR __stdcall AsmLoadAccessRightsByte(
301 ; _In_ ULONG_PTR segment_selector);
; Loads segment access rights for the given selector.
; NOTE(review): the instruction body (presumably "lar eax, ecx" followed by
; ret) is elided between the mov and ENDP in this excerpt -- confirm.
302 AsmLoadAccessRightsByte PROC segment_selector
303 mov ecx, segment_selector
306 AsmLoadAccessRightsByte ENDP
308 ; void __stdcall AsmInvalidateInternalCaches();
; NOTE(review): the body (presumably invd + ret) is elided in this excerpt.
309 AsmInvalidateInternalCaches PROC
312 AsmInvalidateInternalCaches ENDP
314 ; void __stdcall AsmWriteCR2(_In_ ULONG_PTR cr2_value);
; NOTE(review): the body (argument load, mov cr2, ret, ENDP) is elided in
; this excerpt -- confirm against the full source.
315 AsmWriteCR2 PROC cr2_value
321 ; unsigned char __stdcall AsmInvept(
322 ; _In_ InvEptType invept_type,
323 ; _In_ const InvEptDescriptor *invept_descriptor);
; Invalidates EPT-derived translations; returns VMX_OK (0) or a VMX_ERROR_*
; code per the ZF/CF convention.
; NOTE(review): this excerpt elides the "mov ecx, invept_type" line, the
; errorWithCode/errorWithoutCode labels, the ret lines, and ENDP -- confirm
; against the full source.
324 AsmInvept PROC invept_type, invept_descriptor
326 mov edx, invept_descriptor
327 ; invept ecx, oword ptr [edx]
; Hand-encoded because older assemblers lack the mnemonic: 66 0F 38 80 /r
; with ModRM 0Ah (mod=00, reg=ecx, rm=[edx]).
328 db 66h, 0fh, 38h, 80h, 0ah
329 jz errorWithCode ; if (ZF) jmp
330 jc errorWithoutCode ; if (CF) jmp
331 xor eax, eax ; return VMX_OK
335 mov eax, VMX_ERROR_WITHOUT_STATUS
339 mov eax, VMX_ERROR_WITH_STATUS
343 ; unsigned char __stdcall AsmInvvpid(
344 ; _In_ InvVpidType invvpid_type,
345 ; _In_ const InvVpidDescriptor *invvpid_descriptor);
; Invalidates VPID-tagged TLB entries; returns VMX_OK (0) or a VMX_ERROR_*
; code per the ZF/CF convention.
; NOTE(review): this excerpt elides the errorWithCode/errorWithoutCode
; labels, the ret lines, and ENDP -- confirm against the full source.
346 AsmInvvpid PROC invvpid_type, invvpid_descriptor
347 mov ecx, invvpid_type
348 mov edx, invvpid_descriptor
349 ; invvpid ecx, oword ptr [edx]
; Hand-encoded because older assemblers lack the mnemonic: 66 0F 38 81 /r
; with ModRM 0Ah (mod=00, reg=ecx, rm=[edx]).
350 db 66h, 0fh, 38h, 81h, 0ah
351 jz errorWithCode ; if (ZF) jmp
352 jc errorWithoutCode ; if (CF) jmp
353 xor eax, eax ; return VMX_OK
357 mov eax, VMX_ERROR_WITHOUT_STATUS
361 mov eax, VMX_ERROR_WITH_STATUS
366 PURGE ASM_DUMP_REGISTERS