; HyperPlatform Programmer's Reference
; x64.asm
1 ; Copyright (c) 2015-2017, Satoshi Tanda. All rights reserved.
2 ; Use of this source code is governed by a MIT-style license that can be
3 ; found in the LICENSE file.
4 
5 ;
6 ; This module implements all assembler code
7 ;
8 
9 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
10 ;
11 ; References to C functions
12 ;
13 EXTERN VmmVmExitHandler : PROC
14 EXTERN VmmVmxFailureHandler : PROC
15 EXTERN UtilDumpGpRegisters : PROC
16 
17 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
18 ;
19 ; constants
20 ;
; Return values of the Asm* wrappers around VMX instructions.
; VMX instructions report failure through RFLAGS (see the VMX conventions
; in the Intel SDM):
;   ZF=1 -> VMfailValid   (a status code is available in the
;                          VM-instruction error field of the current VMCS)
;   CF=1 -> VMfailInvalid (no status code is available)
.CONST

VMX_OK EQU 0
VMX_ERROR_WITH_STATUS EQU 1         ; ZF was set by the VMX instruction
VMX_ERROR_WITHOUT_STATUS EQU 2      ; CF was set by the VMX instruction
26 
27 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
28 ;
29 ; macros
30 ;
31 
; Saves all general purpose registers to the stack (16 qwords = 8 * 16
; bytes).  The resulting layout (r15 at the lowest address, rax at the
; highest) is what the C handlers receive as their guest-context pointer;
; presumably it mirrors a GpRegisters-style struct on the C side -- confirm
; against the C headers.  A dummy slot stands in for rsp so the record has
; a slot for every GPR.
PUSHAQ MACRO
    push    rax
    push    rcx
    push    rdx
    push    rbx
    push    -1                      ; dummy placeholder for rsp (never restored)
    push    rbp
    push    rsi
    push    rdi
    push    r8
    push    r9
    push    r10
    push    r11
    push    r12
    push    r13
    push    r14
    push    r15
ENDM
51 
; Loads all general purpose registers from the stack, in the exact
; reverse order of PUSHAQ.  The dummy rsp slot is skipped with an
; add rsp, 8 rather than popped, so the current stack pointer is
; never overwritten.
POPAQ MACRO
    pop     r15
    pop     r14
    pop     r13
    pop     r12
    pop     r11
    pop     r10
    pop     r9
    pop     r8
    pop     rdi
    pop     rsi
    pop     rbp
    add     rsp, 8                  ; skip the dummy slot for rsp
    pop     rbx
    pop     rdx
    pop     rcx
    pop     rax
ENDM
71 
; Dumps all general purpose registers and the flag register by calling
; UtilDumpGpRegisters(guest_context, stack_pointer).  All registers and
; RFLAGS are preserved across the macro.
; After pushfq + PUSHAQ, 17 qwords sit on the stack, so rsp + 8*17 is
; the value RSP held before the macro started (the stack_pointer arg).
ASM_DUMP_REGISTERS MACRO
    pushfq
    PUSHAQ                          ; rsp -= 8 * 16
    mov     rcx, rsp                ; 1st arg: guest_context (the saved GPRs)
    mov     rdx, rsp
    add     rdx, 8*17               ; 2nd arg: stack_pointer (skip 16 GPRs + RFLAGS)

    ; 20h of shadow space plus 8h of padding; the extra 8 compensates for
    ; the odd number (17) of pushes so RSP is 16-byte aligned at the call.
    sub     rsp, 28h
    call    UtilDumpGpRegisters     ; UtilDumpGpRegisters(guest_context, stack_pointer);
    add     rsp, 28h

    POPAQ
    popfq
ENDM
87 
88 
89 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
90 ;
91 ; implementations
92 ;
93 .CODE
94 
; bool __stdcall AsmInitializeVm(
;     _In_ void (*vm_initialization_routine)(_In_ ULONG_PTR, _In_ ULONG_PTR,
;                                            _In_opt_ void *),
;     _In_opt_ void *context);
;
; Snapshots the current register state on the stack and calls
; vm_initialization_routine(guest_rsp, asmResumeVm, context).  Returns
; false when execution falls through normally (virtualization was not
; started), and true when execution instead resumes at asmResumeVm,
; i.e. this processor is now running as a virtualized guest after a
; successful vmlaunch.
AsmInitializeVm PROC
    ; RSP is not 16-byte aligned when this function gets called (the call
    ; pushed an 8-byte return address), but the following odd number
    ; (17 = RFLAGS + 16 GPRs) of pushes makes RSP 16-byte aligned.
    pushfq
    PUSHAQ                          ; rsp -= 8 * 16

    mov     rax, rcx                ; rax = vm_initialization_routine
    mov     r8, rdx                 ; 3rd arg: context
    mov     rdx, asmResumeVm        ; 2nd arg: address the guest resumes at
    mov     rcx, rsp                ; 1st arg: current stack pointer

    sub     rsp, 20h                ; shadow space (Microsoft x64 ABI)
    call    rax                     ; vm_initialization_routine(rsp, asmResumeVm, context)
    add     rsp, 20h

    ; Reached only when virtualization did not start.
    POPAQ
    popfq
    xor     rax, rax                ; return false
    ret

    ; This is where the virtualized guest starts to execute after a
    ; successful vmlaunch.
asmResumeVm:
    nop                             ; keep this nop for ease of debugging
    POPAQ
    popfq

    sub     rsp, 8                  ; align RSP for the dump below
    ASM_DUMP_REGISTERS
    add     rsp, 8                  ; restore RSP

    xor     rax, rax
    inc     rax                     ; return true
    ret
AsmInitializeVm ENDP
134 
; void __stdcall AsmVmmEntryPoint();
;
; VM-exit handler entry point (installed as the host RIP in the VMCS).
; Saves the guest's GPRs and the volatile XMM registers, hands the saved
; context to VmmVmExitHandler, then either resumes the guest (vmresume)
; or ends virtualization (vmxoff) and transfers control to the guest's
; next instruction.  Any VMX failure falls into vmxError and stops.
AsmVmmEntryPoint PROC
    ; No need to save the flag register since it is restored from the
    ; VMCS (guest RFLAGS) at the time of vmresume.
    PUSHAQ                          ; rsp -= 8 * 16; saved GPRs = guest_context
    mov     rcx, rsp                ; 1st arg for VmmVmExitHandler

    ; Save volatile XMM registers: xmm0-xmm5 are caller-saved in the
    ; Microsoft x64 ABI and may be clobbered by the C handler.
    ; NOTE(review): movaps requires 16-byte alignment; this assumes the
    ; host RSP loaded from the VMCS is 16-byte aligned -- confirm where
    ; HOST_RSP is set up.
    sub     rsp, 60h
    movaps  xmmword ptr [rsp + 0h], xmm0
    movaps  xmmword ptr [rsp + 10h], xmm1
    movaps  xmmword ptr [rsp + 20h], xmm2
    movaps  xmmword ptr [rsp + 30h], xmm3
    movaps  xmmword ptr [rsp + 40h], xmm4
    movaps  xmmword ptr [rsp + 50h], xmm5

    sub     rsp, 20h                ; shadow space (Microsoft x64 ABI)
    call    VmmVmExitHandler        ; bool vm_continue = VmmVmExitHandler(guest_context);
    add     rsp, 20h

    ; Restore the volatile XMM registers.
    movaps  xmm0, xmmword ptr [rsp + 0h]
    movaps  xmm1, xmmword ptr [rsp + 10h]
    movaps  xmm2, xmmword ptr [rsp + 20h]
    movaps  xmm3, xmmword ptr [rsp + 30h]
    movaps  xmm4, xmmword ptr [rsp + 40h]
    movaps  xmm5, xmmword ptr [rsp + 50h]
    add     rsp, 60h

    test    al, al
    jz      exitVm                  ; if (!vm_continue) jmp exitVm

    POPAQ                           ; restore the (possibly updated) guest GPRs
    vmresume                        ; re-enter the guest
    jmp     vmxError                ; reached only if vmresume failed

exitVm:
    ; Executes vmxoff and ends virtualization.  The C handler left:
    ;   rax = guest's RFLAGS
    ;   rdx = guest's RSP
    ;   rcx = guest's RIP for the next instruction
    POPAQ
    vmxoff
    jz      vmxError                ; if (ZF) VMfailValid
    jc      vmxError                ; if (CF) VMfailInvalid
    push    rax
    popfq                           ; RFLAGS <= guest's RFLAGS
    mov     rsp, rdx                ; RSP <= guest's RSP
    push    rcx
    ret                             ; jmp to the guest's next instruction

vmxError:
    ; Diagnose a critical, unrecoverable error; never returns.
    pushfq
    PUSHAQ                          ; rsp -= 8 * 16
    mov     rcx, rsp                ; 1st arg: all_regs
    ; 20h shadow space + 8h padding to realign RSP (17 pushes above).
    sub     rsp, 28h
    call    VmmVmxFailureHandler    ; VmmVmxFailureHandler(all_regs);
    add     rsp, 28h
    int     3                       ; trap into the debugger / bugcheck
AsmVmmEntryPoint ENDP
197 
; unsigned char __stdcall AsmVmxCall(_In_ ULONG_PTR hypercall_number,
;                                    _In_opt_ void *context);
;
; Issues VMCALL with the caller's rcx/rdx still holding the two
; arguments, then translates the VMX flag convention into a return
; code: VMX_OK, VMX_ERROR_WITH_STATUS (ZF=1) or
; VMX_ERROR_WITHOUT_STATUS (CF=1).
AsmVmxCall PROC
    vmcall                          ; vmcall(hypercall_number, context)
    jz      errorWithCode           ; if (ZF) VMfailValid
    jc      errorWithoutCode        ; if (CF) VMfailInvalid
    xor     rax, rax                ; return VMX_OK
    ret

errorWithoutCode:
    mov     rax, VMX_ERROR_WITHOUT_STATUS
    ret

errorWithCode:
    mov     rax, VMX_ERROR_WITH_STATUS
    ret
AsmVmxCall ENDP
215 
; void __stdcall AsmWriteGDT(_In_ const GDTR *gdtr);
AsmWriteGDT PROC
    lgdt    fword ptr [rcx]         ; load 10-byte GDTR (16-bit limit + 64-bit base)
    ret
AsmWriteGDT ENDP

; void __stdcall AsmReadGDT(_Out_ GDTR *gdtr);
AsmReadGDT PROC
    sgdt    [rcx]                   ; store the current GDTR into *gdtr
    ret
AsmReadGDT ENDP

; void __stdcall AsmWriteLDTR(_In_ USHORT local_segment_selector);
AsmWriteLDTR PROC
    lldt    cx                      ; load the LDT register from the selector
    ret
AsmWriteLDTR ENDP

; USHORT __stdcall AsmReadLDTR();
AsmReadLDTR PROC
    sldt    ax                      ; return the current LDT selector
    ret
AsmReadLDTR ENDP

; void __stdcall AsmWriteTR(_In_ USHORT task_register);
AsmWriteTR PROC
    ltr     cx                      ; load the task register from the selector
    ret
AsmWriteTR ENDP

; USHORT __stdcall AsmReadTR();
AsmReadTR PROC
    str     ax                      ; return the current task register selector
    ret
AsmReadTR ENDP
251 
; Trivial read/write accessors for the segment selector registers.
; Writers take the selector in cx; readers return it in ax.

; void __stdcall AsmWriteES(_In_ USHORT segment_selector);
AsmWriteES PROC
    mov     es, cx
    ret
AsmWriteES ENDP

; USHORT __stdcall AsmReadES();
AsmReadES PROC
    mov     ax, es
    ret
AsmReadES ENDP

; void __stdcall AsmWriteCS(_In_ USHORT segment_selector);
; NOTE(review): MOV to CS is not a valid instruction on x86/x64 and
; raises #UD if executed (CS can only be changed via far transfers such
; as far jmp/call/iret) -- confirm this function is never actually
; called and consider removing it.
AsmWriteCS PROC
    mov     cs, cx
    ret
AsmWriteCS ENDP

; USHORT __stdcall AsmReadCS();
AsmReadCS PROC
    mov     ax, cs
    ret
AsmReadCS ENDP

; void __stdcall AsmWriteSS(_In_ USHORT segment_selector);
AsmWriteSS PROC
    mov     ss, cx
    ret
AsmWriteSS ENDP

; USHORT __stdcall AsmReadSS();
AsmReadSS PROC
    mov     ax, ss
    ret
AsmReadSS ENDP

; void __stdcall AsmWriteDS(_In_ USHORT segment_selector);
AsmWriteDS PROC
    mov     ds, cx
    ret
AsmWriteDS ENDP

; USHORT __stdcall AsmReadDS();
AsmReadDS PROC
    mov     ax, ds
    ret
AsmReadDS ENDP

; void __stdcall AsmWriteFS(_In_ USHORT segment_selector);
AsmWriteFS PROC
    mov     fs, cx
    ret
AsmWriteFS ENDP

; USHORT __stdcall AsmReadFS();
AsmReadFS PROC
    mov     ax, fs
    ret
AsmReadFS ENDP

; void __stdcall AsmWriteGS(_In_ USHORT segment_selector);
AsmWriteGS PROC
    mov     gs, cx
    ret
AsmWriteGS ENDP

; USHORT __stdcall AsmReadGS();
AsmReadGS PROC
    mov     ax, gs
    ret
AsmReadGS ENDP
323 
; ULONG_PTR __stdcall AsmLoadAccessRightsByte(_In_ ULONG_PTR segment_selector);
; Executes LAR: on success (ZF=1) rax holds the access-rights bits of the
; descriptor selected by rcx.  Note: despite the name, LAR loads the full
; access-rights field, not a single byte.
AsmLoadAccessRightsByte PROC
    lar     rax, rcx
    ret
AsmLoadAccessRightsByte ENDP

; void __stdcall AsmInvalidateInternalCaches();
AsmInvalidateInternalCaches PROC
    invd                            ; invalidate internal caches (no write-back)
    ret
AsmInvalidateInternalCaches ENDP

; void __stdcall AsmWriteCR2(_In_ ULONG_PTR cr2_value);
AsmWriteCR2 PROC
    mov     cr2, rcx                ; restore a (guest) page-fault linear address
    ret
AsmWriteCR2 ENDP
341 
; unsigned char __stdcall AsmInvept(
;     _In_ InvEptType invept_type,
;     _In_ const InvEptDescriptor *invept_descriptor);
;
; Invalidates EPT-derived cached translations.  Returns VMX_OK,
; VMX_ERROR_WITH_STATUS (ZF=1) or VMX_ERROR_WITHOUT_STATUS (CF=1),
; per the VMX flag convention.
AsmInvept PROC
    ; invept ecx, oword ptr [rdx]
    ; Hand-encoded (66 0F 38 80 /r; ModRM 0Ah = reg rcx, mem [rdx]),
    ; presumably because the assembler in use lacked the mnemonic.
    db      66h, 0fh, 38h, 80h, 0ah
    jz      errorWithCode           ; if (ZF) VMfailValid
    jc      errorWithoutCode        ; if (CF) VMfailInvalid
    xor     rax, rax                ; return VMX_OK
    ret

errorWithoutCode:
    mov     rax, VMX_ERROR_WITHOUT_STATUS
    ret

errorWithCode:
    mov     rax, VMX_ERROR_WITH_STATUS
    ret
AsmInvept ENDP
361 
; unsigned char __stdcall AsmInvvpid(
;     _In_ InvVpidType invvpid_type,
;     _In_ const InvVpidDescriptor *invvpid_descriptor);
;
; Invalidates VPID-tagged cached translations.  Returns VMX_OK,
; VMX_ERROR_WITH_STATUS (ZF=1) or VMX_ERROR_WITHOUT_STATUS (CF=1),
; per the VMX flag convention.
AsmInvvpid PROC
    ; invvpid ecx, oword ptr [rdx]
    ; Hand-encoded (66 0F 38 81 /r; ModRM 0Ah = reg rcx, mem [rdx]),
    ; presumably because the assembler in use lacked the mnemonic.
    db      66h, 0fh, 38h, 81h, 0ah
    jz      errorWithCode           ; if (ZF) VMfailValid
    jc      errorWithoutCode        ; if (CF) VMfailInvalid
    xor     rax, rax                ; return VMX_OK
    ret

errorWithoutCode:
    mov     rax, VMX_ERROR_WITHOUT_STATUS
    ret

errorWithCode:
    mov     rax, VMX_ERROR_WITH_STATUS
    ret
AsmInvvpid ENDP
381 
382 
; Delete the macro definitions so they cannot leak past this module.
PURGE PUSHAQ
PURGE POPAQ
PURGE ASM_DUMP_REGISTERS
END