xref: /kvm-unit-tests/x86/emulator.c (revision a991ed2fb5eef30af4e402cc79bcbae067d43e6b)
1 #include "ioram.h"
2 #include "vm.h"
3 #include "libcflat.h"
4 #include "desc.h"
5 #include "types.h"
6 #include "processor.h"
7 #include "vmalloc.h"
8 #include "alloc_page.h"
9 
/* The test environment has no libc; route memset to the compiler builtin. */
#define memset __builtin_memset
/* I/O port backed by a test device in the harness; reads return last write. */
#define TESTDEV_IO_PORT 0xe0

/* Count of exceptions observed by the currently-installed handler. */
static int exceptions;

/* Forced emulation prefix, used to invoke the emulator unconditionally.  */
#define KVM_FEP "ud2; .byte 'k', 'v', 'm';"
#define KVM_FEP_LENGTH 5
/* Cleared by record_no_fep() if KVM_FEP raises #UD (prefix not enabled). */
static int fep_available = 1;
19 
/* Full x86-64 general-purpose register snapshot. */
struct regs {
	u64 rax, rbx, rcx, rdx;
	u64 rsi, rdi, rsp, rbp;
	u64 r8, r9, r10, r11;
	u64 r12, r13, r14, r15;
	u64 rip, rflags;
};
/* NOTE(review): not referenced in this file's visible code — presumably
 * used by test-harness glue elsewhere; confirm before removing. */
struct regs inregs, outregs, save;
28 
/* Pointer/length pair describing an instruction sequence.
 * NOTE(review): unused in the visible code — verify external users. */
struct insn_desc {
	u64 ptr;
	size_t len;
};
33 
34 static char st1[] = "abcdefghijklmnop";
35 
36 static void test_stringio(void)
37 {
38 	unsigned char r = 0;
39 	asm volatile("cld \n\t"
40 		     "movw %0, %%dx \n\t"
41 		     "rep outsb \n\t"
42 		     : : "i"((short)TESTDEV_IO_PORT),
43 		       "S"(st1), "c"(sizeof(st1) - 1));
44 	asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
45 	report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */
46 
47 	asm volatile("std \n\t"
48 		     "movw %0, %%dx \n\t"
49 		     "rep outsb \n\t"
50 		     : : "i"((short)TESTDEV_IO_PORT),
51 		       "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
52 	asm volatile("cld \n\t" : : );
53 	asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
54 	report(r == st1[0], "outsb down");
55 }
56 
/*
 * Exercise repe cmps{b,w,l,q}.  The caller (test_cmps) arranges for m1
 * and m3 to match for the first 100 bytes and to first differ at byte
 * offset 100, so the short-count runs below terminate on RCX reaching 0
 * and the long-count runs terminate at the mismatch.
 */
static void test_cmps_one(unsigned char *m1, unsigned char *m3)
{
	void *rsi, *rdi;
	long rcx, tmp;

	/* 30 equal bytes: count exhausts, ZF stays set. */
	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)");

	/* Same, but with ZF clear on entry: repe must still start. */
	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("or $1, %[tmp]\n\t" // clear ZF
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30,
	       "repe cmpsb (1.zf)");

	/* Word compare over the matching region (15 * 2 = 30 bytes). */
	rsi = m1; rdi = m3; rcx = 15;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)");

	/* Dword compare (7 * 4 = 28 bytes). */
	rsi = m1; rdi = m3; rcx = 7;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpll (1)");

	/* Qword compare (4 * 8 = 32 bytes); bytes 30/31 also match. */
	rsi = m1; rdi = m3; rcx = 4;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)");

	/* Count past the mismatch at offset 100: stops after byte 100. */
	rsi = m1; rdi = m3; rcx = 130;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101,
	       "repe cmpsb (2)");

	/* Word run hits the mismatching word at offset 100. */
	rsi = m1; rdi = m3; rcx = 65;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102,
	       "repe cmpsw (2)");

	/* Dword run hits the mismatching dword at offset 100. */
	rsi = m1; rdi = m3; rcx = 32;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpll (2)");

	/* Qword run hits the mismatching qword covering offset 100. */
	rsi = m1; rdi = m3; rcx = 16;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpsq (2)");

}
131 
/*
 * Prepare three buffers for the cmps tests: bytes 0-99 are identical in
 * all three, bytes 100-199 are one greater in m1 than in m2/m3.  Run the
 * comparison suite once against a stack buffer and once against the
 * second (MMIO-backed) buffer.
 */
static void test_cmps(void *mem)
{
	unsigned char *m1 = mem, *m2 = mem + 1024;
	unsigned char m3[1024];
	int i;

	for (i = 0; i < 200; ++i) {
		unsigned char b = i;

		m2[i] = b;
		m3[i] = b;
		m1[i] = (i < 100) ? b : b + 1;
	}
	test_cmps_one(m1, m3);
	test_cmps_one(m1, m2);
}
144 
/*
 * Exercise scas{b,w,l,q}.  Memory holds 0x00077665544332211; each scas
 * compares only the low 8/16/32/64 bits of RAX against memory, so the
 * high bytes of the "a" inputs (e.g. the 0xff in 0xff11) must be ignored
 * for the match cases to pass.  DI must advance by the operand size
 * regardless of the comparison result.
 */
static void test_scas(void *mem)
{
    bool z;
    void *di;

    *(ulong *)mem = 0x77665544332211;

    di = mem;
    asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
    report(di == mem + 1 && z, "scasb match");

    di = mem;
    asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
    report(di == mem + 1 && !z, "scasb mismatch");

    di = mem;
    asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
    report(di == mem + 2 && z, "scasw match");

    di = mem;
    asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
    report(di == mem + 2 && !z, "scasw mismatch");

    di = mem;
    asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff44332211ul));
    report(di == mem + 4 && z, "scasd match");

    di = mem;
    asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
    report(di == mem + 4 && !z, "scasd mismatch");

    di = mem;
    asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
    report(di == mem + 8 && z, "scasq match");

    di = mem;
    asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
    report(di == mem + 8 && !z, "scasq mismatch");
}
184 
/* Write CR8 (TPR) and read it back; both copies must see the value 3. */
static void test_cr8(void)
{
	unsigned long src, dst;

	dst = 777;	/* sentinel overwritten by the CR8 read-back */
	src = 3;
	asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
		     : [dst]"+r"(dst), [src]"+r"(src));
	report(dst == 3 && src == 3, "mov %%cr8");
}
195 
/*
 * Test the push-family encodings (imm8, reg, mem, imm32) on a private
 * stack carved out of the scratch page, then verify the pushed values
 * via normal memory reads after restoring the real RSP.
 */
static void test_push(void *mem)
{
	unsigned long tmp;
	unsigned long *stack_top = mem + 4096;
	unsigned long *new_stack_top;
	unsigned long memw = 0x123456789abcdeful;

	/* Poison the scratch stack so stale data can't fake a pass. */
	memset(mem, 0x55, (void *)stack_top - mem);

	asm volatile("mov %%rsp, %[tmp] \n\t"	/* save real stack */
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq $-7 \n\t"		/* sign-extended imm8 */
		     "pushq %[reg] \n\t"
		     "pushq (%[mem]) \n\t"
		     "pushq $-7070707 \n\t"	/* sign-extended imm32 */
		     "mov %%rsp, %[new_stack_top] \n\t"
		     "mov %[tmp], %%rsp"	/* restore real stack */
		     : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
		     : [stack_top]"r"(stack_top),
		       [reg]"r"(-17l), [mem]"r"(&memw)
		     : "memory");

	report(stack_top[-1] == -7ul, "push $imm8");
	report(stack_top[-2] == -17ul, "push %%reg");
	report(stack_top[-3] == 0x123456789abcdeful, "push mem");
	report(stack_top[-4] == -7070707, "push $imm");
}
223 
/*
 * Test pop to memory and register, ret, leave, and enter, each run on a
 * private stack inside the scratch page so the real stack is untouched.
 */
static void test_pop(void *mem)
{
	unsigned long tmp, tmp3, rsp, rbp;
	unsigned long *stack_top = mem + 4096;
	unsigned long memw = 0x123456789abcdeful;
	static unsigned long tmp2;

	memset(mem, 0x55, (void *)stack_top - mem);

	/* popq with a memory destination, on the real stack. */
	asm volatile("pushq %[val] \n\t"
		     "popq (%[mem])"
		     : : [val]"m"(memw), [mem]"r"(mem) : "memory");
	report(*(unsigned long *)mem == memw, "pop mem");

	memw = 7 - memw;	/* fresh value so stale data can't pass */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"	/* memory operand */
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop mem (2)");

	memw = 129443 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"	/* register operand */
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop reg");

	/* ret must jump to the pushed address, skipping the 2: spin loop. */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "push $1f \n\t"
		     "ret \n\t"
		     "2: jmp 2b \n\t"
		     "1: mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
		     : "memory");
	report(1, "ret");

	/* leave: rsp <- rbp, then pop rbp; run with rbp/rsp swapped into
	 * r8/r9 so the compiler's frame registers are preserved. */
	stack_top[-1] = 0x778899;
	asm volatile("mov %[stack_top], %%r8 \n\t"
		     "mov %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "leave \n\t"
		     "xchg %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "mov %%r9, %[tmp] \n\t"
		     "mov %%r8, %[tmp3]"
		     : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1)
		     : "memory", "r8", "r9");
	report(tmp == (ulong)stack_top && tmp3 == 0x778899, "leave");

	/* enter $0x1238, $0: push rbp, rbp <- rsp, rsp -= 0x1238. */
	rbp = 0xaa55aa55bb66bb66ULL;
	rsp = (unsigned long)stack_top;
	asm volatile("mov %[rsp], %%r8 \n\t"
		     "mov %[rbp], %%r9 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "enter $0x1238, $0 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "xchg %%r8, %[rsp] \n\t"
		     "xchg %%r9, %[rbp]"
		     : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9");
	report(rsp == (unsigned long)stack_top - 8 - 0x1238
	       && rbp == (unsigned long)stack_top - 8
	       && stack_top[-1] == 0xaa55aa55bb66bb66ULL,
	       "enter");
}
300 
/*
 * Test indirect far jump with a 64-bit offset (rex64 ljmp).  A far
 * pointer {offset, cs} is built in memory: 8-byte target address
 * followed by the 2-byte current code selector.  If the jump works,
 * execution lands on jmpf before "res = 0" can run.
 */
static void test_ljmp(void *mem)
{
    unsigned char *m = mem;
    volatile int res = 1;

    *(unsigned long**)m = &&jmpf;	/* offset part of the far pointer */
    asm volatile ("data16 mov %%cs, %0":"=m"(*(m + sizeof(unsigned long))));
    asm volatile ("rex64 ljmp *%0"::"m"(*m));
    res = 0;	/* must be skipped by the far jump */
jmpf:
    report(res, "ljmp");
}
313 
/*
 * Test inc/dec (plain and lock-prefixed) and lock-prefixed not/neg with
 * memory operands, at byte and full-width sizes.
 */
static void test_incdecnotneg(void *mem)
{
    unsigned long *m = mem, v = 1234;
    unsigned char *mb = mem, vb = 66;

    *m = 0;

    asm volatile ("incl %0":"+m"(*m));
    report(*m == 1, "incl");
    asm volatile ("decl %0":"+m"(*m));
    report(*m == 0, "decl");
    asm volatile ("incb %0":"+m"(*m));
    report(*m == 1, "incb");
    asm volatile ("decb %0":"+m"(*m));
    report(*m == 0, "decb");

    /* Same again with the lock prefix, which forces emulation paths. */
    asm volatile ("lock incl %0":"+m"(*m));
    report(*m == 1, "lock incl");
    asm volatile ("lock decl %0":"+m"(*m));
    report(*m == 0, "lock decl");
    asm volatile ("lock incb %0":"+m"(*m));
    report(*m == 1, "lock incb");
    asm volatile ("lock decb %0":"+m"(*m));
    report(*m == 0, "lock decb");

    *m = v;

    /* Track the expected value in v alongside each in-memory op. */
    asm ("lock negq %0" : "+m"(*m)); v = -v;
    report(*m == v, "lock negl");
    asm ("lock notq %0" : "+m"(*m)); v = ~v;
    report(*m == v, "lock notl");

    *mb = vb;

    asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
    report(*mb == vb, "lock negb");
    asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
    report(*mb == vb, "lock notb");
}
353 
/*
 * Test smsw to a register and to memory.  smsw returns the low word of
 * CR0; a 16-bit memory store must leave the neighboring words untouched,
 * and a store to MMIO-backed memory (h_mem) must only replace the low
 * 16 bits of the qword.
 */
static void test_smsw(uint64_t *h_mem)
{
	char mem[16];
	unsigned short msw, msw_orig, *pmsw;
	int i, zero;

	msw_orig = read_cr0();

	asm("smsw %0" : "=r"(msw));
	report(msw == msw_orig, "smsw (1)");

	/* Store into the middle of a zeroed buffer; only word 4 may change. */
	memset(mem, 0, 16);
	pmsw = (void *)mem;
	asm("smsw %0" : "=m"(pmsw[4]));
	zero = 1;
	for (i = 0; i < 8; ++i)
		if (i != 4 && pmsw[i])
			zero = 0;
	report(msw == pmsw[4] && zero, "smsw (2)");

	/* Trigger exit on smsw */
	*h_mem = 0x12345678abcdeful;
	asm volatile("smsw %0" : "+m"(*h_mem));
	report(msw == (unsigned short)*h_mem &&
	       (*h_mem & ~0xfffful) == 0x12345678ab0000ul, "smsw (3)");
}
380 
/*
 * Test lmsw from register and memory.  lmsw writes the low word of CR0
 * (toggling TS here, bit 3), and architecturally cannot clear CR0.PE.
 */
static void test_lmsw(void)
{
	char mem[16];
	unsigned short msw, *pmsw;
	unsigned long cr0;

	cr0 = read_cr0();

	msw = cr0 ^ 8;	/* toggle TS */
	asm("lmsw %0" : : "r"(msw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report((cr0 ^ read_cr0()) == 8, "lmsw (1)");

	/* Memory-operand form restores the original value. */
	pmsw = (void *)mem;
	*pmsw = cr0;
	asm("lmsw %0" : : "m"(*pmsw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report(cr0 == read_cr0(), "lmsw (2)");

	/* lmsw can't clear cr0.pe */
	msw = (cr0 & ~1ul) ^ 4;  /* change EM to force trap */
	asm("lmsw %0" : : "r"(msw));
	report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)");

	/* back to normal */
	msw = cr0;
	asm("lmsw %0" : : "r"(msw));
}
409 
/*
 * Test xchg reg, mem at 8/16/32/64-bit widths.  Memory starts at
 * 0x123456789abcdef and RAX at 0xfedcba9876543210 each time; the
 * expected values check that only the operand-width slice is swapped
 * (and that the 32-bit form zero-extends RAX).
 */
static void test_xchg(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: only AL and the low memory byte swap. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10,
	       "xchg reg, r/m (1)");

	/* 16-bit swap. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210,
	       "xchg reg, r/m (2)");

	/* 32-bit swap: writing EAX clears the upper half of RAX. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x123456776543210,
	       "xchg reg, r/m (3)");

	/* 64-bit: full values swap. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xfedcba9876543210,
	       "xchg reg, r/m (4)");
}
459 
/*
 * Test xadd reg, mem at 8/16/32/64-bit widths: the register receives
 * the old memory value and memory receives the sum.  Same operands as
 * test_xchg so the expected constants differ only by the addition.
 */
static void test_xadd(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: mem low byte becomes 0xef + 0x10 = 0xff. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff,
	       "xadd reg, r/m (1)");

	/* 16-bit: 0xcdef + 0x3210 = 0xffff. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789abffff,
	       "xadd reg, r/m (2)");

	/* 32-bit: result zero-extends RAX to the old memory dword. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x1234567ffffffff,
	       "xadd reg, r/m (3)");

	/* 64-bit: full-width add. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xffffffffffffffff,
	       "xadd reg, r/m (4)");
}
509 
/*
 * Test btc with bit offsets that reach beyond the addressed dword: the
 * immediate form wraps modulo the operand size, while the register form
 * (including negative offsets) addresses neighboring memory, so the
 * expected results span a[0]..a[3].
 */
static void test_btc(void *mem)
{
	unsigned int *a = mem;

	memset(mem, 0, 4 * sizeof(unsigned int));

	asm ("btcl $32, %0" :: "m"(a[0]) : "memory");	/* imm wraps to bit 0 */
	asm ("btcl $1, %0" :: "m"(a[1]) : "memory");
	asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory");	/* bit 2 of a[2] */
	report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m");

	/* Negative register offset reaches backwards from a[3] into a[2]. */
	asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory");
	report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m");

	/* 64-bit form with offset -1 flips the top bit of the prior qword. */
	asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory");
	report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0,
	       "btcq reg, r/m");
}
528 
/*
 * Test bsf/bsr at 16/32/64-bit widths with memory sources, plus the
 * zero-input case where ZF must be set.
 * NOTE(review): the two zero-input checks reuse the report names
 * "bsfq r/m, reg" / "bsrq r/m, reg" already used above — presumably a
 * copy-paste slip in the test labels; confirm against the harness before
 * renaming.
 */
static void test_bsfbsr(void *mem)
{
	unsigned long rax, *memq = mem;
	unsigned eax, *meml = mem;
	unsigned short ax, *memw = mem;
	unsigned char z;

	*memw = 0xc000;		/* lowest set bit: 14 */
	asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 14, "bsfw r/m, reg");

	*meml = 0xc0000000;	/* lowest set bit: 30 */
	asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 30, "bsfl r/m, reg");

	*memq = 0xc00000000000;	/* lowest set bit: 46 */
	asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 46, "bsfq r/m, reg");

	/* Zero source: bsf must set ZF. */
	*memq = 0;
	asm("bsfq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsfq r/m, reg");

	*memw = 0xc000;		/* highest set bit: 15 */
	asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 15, "bsrw r/m, reg");

	*meml = 0xc0000000;	/* highest set bit: 31 */
	asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 31, "bsrl r/m, reg");

	*memq = 0xc00000000000;	/* highest set bit: 47 */
	asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 47, "bsrq r/m, reg");

	/* Zero source: bsr must set ZF. */
	*memq = 0;
	asm("bsrq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsrq r/m, reg");
}
570 
/*
 * Test the two-operand (reg *= mem) and three-operand (reg = mem * imm)
 * forms of imul at 16/32/64-bit widths.  Expected values encode the
 * width semantics: 16-bit results leave the upper bits of RAX intact,
 * 32-bit results zero-extend, 64-bit results fill the register.
 */
static void test_imul(ulong *mem)
{
    ulong a;

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
    report(a == 0x12345678123439e8, "imul ax, mem");

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
    report(a == 0xa06d39e8, "imul eax, mem");

    *mem = 51; a = 0x1234567812345678UL;
    asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
    report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem");

    /* Three-operand forms with an imm8 multiplier. */
    *mem  = 0x1234567812345678UL; a = 0x8765432187654321L;
    asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
    report(a == 0x87654321876539e8, "imul ax, mem, imm8");

    *mem = 0x1234567812345678UL;
    asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
    report(a == 0xa06d39e8, "imul eax, mem, imm8");

    *mem = 0x1234567812345678UL;
    asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
    report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8");

    /* Three-operand forms with a full-size immediate (311 > imm8). */
    *mem  = 0x1234567812345678UL; a = 0x8765432187654321L;
    asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
    report(a == 0x8765432187650bc8, "imul ax, mem, imm");

    *mem = 0x1234567812345678UL;
    asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
    report(a == 0x1d950bc8, "imul eax, mem, imm");

    *mem = 0x1234567812345678UL;
    asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
    report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm");
}
611 
/*
 * Test divq (including the #DE fault path via ASM_TRY) and mul at all
 * four widths.  For the fault case, ex must stay 1 (the movb clearing
 * it is skipped) and rax/rdx must be left unmodified.
 */
static void test_muldiv(long *mem)
{
    long a, d, aa, dd;
    u8 ex = 1;

    /* Divide by zero: expect #DE, operands unchanged, ex still set. */
    *mem = 0; a = 1; d = 2;
    asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	 : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
    report(a == 1 && d == 2 && ex, "divq (fault)");

    /* Valid 128/64 division: check quotient (rax) and remainder (rdx). */
    *mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
    asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	 : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
    report(a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex,
           "divq (1)");
    /* mul: widths below affect increasing slices of rax/rdx. */
    aa = 0x1111111111111111; dd = 0x2222222222222222;
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report(a == 0x1111111111110363 && d == dd, "mulb mem");
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report(a == 0x111111111111c963 && d == 0x2222222222220369, "mulw mem");
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report(a == 0x962fc963 && d == 0x369d036, "mull mem");
    *mem = 0x3333333333333333; a = aa; d = dd;
    asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
    report(a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0, "mulq mem");
}
641 
typedef unsigned __attribute__((vector_size(16))) sse128;

typedef union {
    sse128 sse;
    unsigned u[4];
} sse_union;

/* Return true iff all four 32-bit lanes of the two SSE values match. */
static bool sseeq(sse_union *v1, sse_union *v2)
{
    int lane;

    for (lane = 0; lane < 4; ++lane) {
	if (v1->u[lane] != v2->u[lane])
	    return false;
    }

    return true;
}
660 
/*
 * Test SSE loads/stores (movdqu/movaps/movapd) in both directions.
 * CR0.EM/TS are cleared and CR4.OSFXSR set first so SSE instructions
 * execute without #UD/#NM.
 */
static __attribute__((target("sse2"))) void test_sse(sse_union *mem)
{
    sse_union v;

    write_cr0(read_cr0() & ~6); /* EM, TS */
    write_cr4(read_cr4() | 0x200); /* OSFXSR */
    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movdqu %1, %0" : "=m"(*mem) : "x"(v.sse));
    report(sseeq(&v, mem), "movdqu (read)");
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movdqu %1, %0" : "=x"(v.sse) : "m"(*mem));
    report(sseeq(mem, &v), "movdqu (write)");

    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movaps %1, %0" : "=m"(*mem) : "x"(v.sse));
    report(sseeq(mem, &v), "movaps (read)");
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movaps %1, %0" : "=x"(v.sse) : "m"(*mem));
    report(sseeq(&v, mem), "movaps (write)");

    v.u[0] = 1; v.u[1] = 2; v.u[2] = 3; v.u[3] = 4;
    asm("movapd %1, %0" : "=m"(*mem) : "x"(v.sse));
    report(sseeq(mem, &v), "movapd (read)");
    mem->u[0] = 5; mem->u[1] = 6; mem->u[2] = 7; mem->u[3] = 8;
    asm("movapd %1, %0" : "=x"(v.sse) : "m"(*mem));
    report(sseeq(&v, mem), "movapd (write)");
}
688 
/*
 * Test MMX movq to and from memory.  CR0.EM/TS are cleared and the FPU
 * reinitialized so MMX instructions execute without faulting.
 */
static void test_mmx(uint64_t *mem)
{
    uint64_t v;

    write_cr0(read_cr0() & ~6); /* EM, TS */
    asm volatile("fninit");
    v = 0x0102030405060708ULL;
    asm("movq %1, %0" : "=m"(*mem) : "y"(v));	/* mmx reg -> memory */
    report(v == *mem, "movq (mmx, read)");
    *mem = 0x8070605040302010ull;
    asm("movq %1, %0" : "=y"(v) : "m"(*mem));	/* memory -> mmx reg */
    report(v == *mem, "movq (mmx, write)");
}
702 
/*
 * Test a RIP-relative store.  A "movb $1, disp(%rip); ret" sequence is
 * hand-assembled into insn_ram with the displacement chosen so the store
 * hits mem+2, then called; *mem == 0x10000 means exactly byte 2 was set.
 */
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
    /* movb $1, mem+2(%rip) */
    insn_ram[0] = 0xc6;		/* opcode: mov imm8 to r/m8 */
    insn_ram[1] = 0x05;		/* modrm: RIP-relative, /0 */
    /* disp32 is relative to the end of the 7-byte instruction */
    *(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
    insn_ram[6] = 0x01;		/* imm8 value */
    /* ret */
    insn_ram[7] = 0xc3;

    *mem = 0;
    asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
    report(*mem == 0x10000, "movb $imm, 0(%%rip)");
}
717 
/*
 * Test shld/shrd with a CL shift count and a memory destination: three
 * bits shift in from the source operand's top/bottom (0b101 = 5).
 */
static void test_shld_shrd(u32 *mem)
{
    *mem = 0x12345678;
    asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
    report(*mem == ((0x12345678 << 3) | 5), "shld (cl)");
    *mem = 0x12345678;
    asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
    report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)");
}
727 
/*
 * Test cmovne with a false condition (cmpl %eax,%eax sets ZF).  The move
 * is not taken, but a 32-bit cmov must still clear the upper half of the
 * 64-bit destination, so RAX becomes 0x12345678.
 */
static void test_cmov(u32 *mem)
{
	u64 val;
	*mem = 0xabcdef12u;
	asm ("movq $0x1234567812345678, %%rax\n\t"
	     "cmpl %%eax, %%eax\n\t"
	     "cmovnel (%[mem]), %%eax\n\t"
	     "movq %%rax, %[val]\n\t"
	     : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc");
	report(val == 0x12345678ul, "cmovnel");
}
739 
/* Instruction length to skip when the handler below fires; set by each
 * test before provoking its fault. */
static unsigned long rip_advance;

/* Generic fault handler: count the exception and resume execution past
 * the faulting instruction. */
static void advance_rip_and_note_exception(struct ex_regs *regs)
{
    ++exceptions;
    regs->rip += rip_advance;
}
747 
/*
 * Verify that an emulated MMX movq delivers a pending #MF: unmask all
 * FPU exceptions, provoke a divide-by-zero, then force-emulate movq via
 * KVM_FEP; the #MF handler should fire exactly once.
 */
static void test_mmx_movq_mf(uint64_t *mem)
{
    /* movq %mm0, (%rax) */
    extern char movq_start, movq_end;

    uint16_t fcw = 0;  /* all exceptions unmasked */
    write_cr0(read_cr0() & ~6);  /* TS, EM */
    exceptions = 0;
    handle_exception(MF_VECTOR, advance_rip_and_note_exception);
    asm volatile("fninit; fldcw %0" : : "m"(fcw));
    asm volatile("fldz; fldz; fdivp"); /* generate exception */

    /* the handler skips the whole forced-emulated instruction */
    rip_advance = &movq_end - &movq_start;
    asm(KVM_FEP "movq_start: movq %mm0, (%rax); movq_end:");
    /* exit MMX mode */
    asm volatile("fnclex; emms");
    report(exceptions == 1, "movq mmx generates #MF");
    handle_exception(MF_VECTOR, 0);
}
767 
/*
 * An indirect jump to a non-canonical address (0x1111...) must raise
 * #GP on the jump itself; the handler advances RIP past the jmp.
 */
static void test_jmp_noncanonical(uint64_t *mem)
{
	extern char nc_jmp_start, nc_jmp_end;

	*mem = 0x1111111111111111ul;	/* non-canonical target */

	exceptions = 0;
	rip_advance = &nc_jmp_end - &nc_jmp_start;
	handle_exception(GP_VECTOR, advance_rip_and_note_exception);
	asm volatile ("nc_jmp_start: jmp *%0; nc_jmp_end:" : : "m"(*mem));
	report(exceptions == 1, "jump to non-canonical address");
	handle_exception(GP_VECTOR, 0);
}
781 
/* Force-emulate a 64-bit mov-immediate and check the full value lands. */
static void test_movabs(uint64_t *mem)
{
    /* mov $0x9090909090909090, %rcx */
    unsigned long rcx;
    asm(KVM_FEP "mov $0x9090909090909090, %0" : "=c" (rcx) : "0" (0));
    report(rcx == 0x9090909090909090, "64-bit mov imm2");
}
789 
/*
 * Force-emulate smsw with register destinations at all three widths.
 * The 16-bit form must preserve the upper 48 bits of RAX; the 32-bit
 * form zero-extends; the 64-bit form writes the whole CR0 value.
 */
static void test_smsw_reg(uint64_t *mem)
{
	unsigned long cr0 = read_cr0();
	unsigned long rax;
	const unsigned long in_rax = 0x1234567890abcdeful;

	asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax));
	report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16,
	       "16-bit smsw reg");

	asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == (u32)cr0, "32-bit smsw reg");

	asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == cr0, "64-bit smsw reg");
}
806 
/* Force-emulate nop; RAX must pass through completely untouched. */
static void test_nop(uint64_t *mem)
{
	unsigned long rax;
	const unsigned long in_rax = 0x1234567890abcdeful;
	asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == in_rax, "nop");
}
814 
/*
 * Force-emulate mov to/from DR6.  Writing 0 then reading back must
 * yield the architecturally fixed-1 bits: 0xffff0ff0, with bit 16 (RTM)
 * clear instead when the CPU supports RTM.
 */
static void test_mov_dr(uint64_t *mem)
{
	unsigned long rax;
	const unsigned long in_rax = 0;
	bool rtm_support = this_cpu_has(X86_FEATURE_RTM);
	unsigned long dr6_fixed_1 = rtm_support ? 0xfffe0ff0ul : 0xffff0ff0ul;
	asm(KVM_FEP "movq %0, %%dr6\n\t"
	    KVM_FEP "movq %%dr6, %0\n\t" : "=a" (rax) : "a" (in_rax));
	report(rax == dr6_fixed_1, "mov_dr6");
}
825 
/*
 * 16-bit push/pop pair: each must move RSP by exactly 2 bytes, so RSP
 * before and after the pair must be equal.
 */
static void test_push16(uint64_t *mem)
{
	uint64_t rsp1, rsp2;
	uint16_t r;

	asm volatile (	"movq %%rsp, %[rsp1]\n\t"
			"pushw %[v]\n\t"
			"popw %[r]\n\t"
			"movq %%rsp, %[rsp2]\n\t"
			"movq %[rsp1], %%rsp\n\t" :
			[rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r)
			: [v]"m"(*mem) : "memory");
	report(rsp1 == rsp2, "push16");
}
840 
/*
 * 16-bit accesses straddling a page boundary in MMIO space (main maps
 * the same physical MMIO page at mem and mem+4096): both halves of the
 * read and the write must land correctly.
 */
static void test_crosspage_mmio(volatile uint8_t *mem)
{
    volatile uint16_t w, *pw;

    pw = (volatile uint16_t *)&mem[4095];	/* last byte of page 0 + first of page 1 */
    mem[4095] = 0x99;
    mem[4096] = 0x77;
    asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
    report(w == 0x7799, "cross-page mmio read");
    asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
    report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write");
}
853 
/*
 * rep insb into an MMIO buffer that crosses a page boundary: prime the
 * test port with 0x99, then stream 1024 bytes in and check the last one.
 */
static void test_string_io_mmio(volatile uint8_t *mem)
{
	/* Cross MMIO pages.*/
	volatile uint8_t *mmio = mem + 4032;

	asm volatile("outw %%ax, %%dx  \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));

	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));

	report(mmio[1023] == 0x99, "string_io_mmio");
}
865 
866 /* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
867 #if 0
868 static void test_lgdt_lidt(volatile uint8_t *mem)
869 {
870     struct descriptor_table_ptr orig, fresh = {};
871 
872     sgdt(&orig);
873     *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
874 	.limit = 0xf234,
875 	.base = 0x12345678abcd,
876     };
877     cli();
878     asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
879     sgdt(&fresh);
880     lgdt(&orig);
881     sti();
882     report(orig.limit == fresh.limit && orig.base == fresh.base,
883            "lgdt (long address)");
884 
885     sidt(&orig);
886     *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
887 	.limit = 0x432f,
888 	.base = 0xdbca87654321,
889     };
890     cli();
891     asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
892     sidt(&fresh);
893     lidt(&orig);
894     sti();
895     report(orig.limit == fresh.limit && orig.base == fresh.base,
896            "lidt (long address)");
897 }
898 #endif
899 
/* #GP handler for test_sreg: count the fault and resume at the label
 * placed just after the faulting mov-to-ss. */
static void ss_bad_rpl(struct ex_regs *regs)
{
    extern char ss_bad_rpl_cont;

    ++exceptions;
    regs->rip = (ulong)&ss_bad_rpl_cont;
}
907 
/*
 * Segment-register load tests: a null selector load into SS must
 * succeed, while a null selector with RPL != CPL must raise #GP.
 * The original SS is restored at the end.
 */
static void test_sreg(volatile uint16_t *mem)
{
    u16 ss = read_ss();

    // check for null segment load
    *mem = 0;
    asm volatile("mov %0, %%ss" : : "m"(*mem));
    report(read_ss() == 0, "mov null, %%ss");

    // check for exception when ss.rpl != cpl on null segment load
    exceptions = 0;
    handle_exception(GP_VECTOR, ss_bad_rpl);
    *mem = 3;	/* null selector with RPL 3 while running at CPL 0 */
    asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem));
    report(exceptions == 1 && read_ss() == 0,
           "mov null, %%ss (with ss.rpl != cpl)");
    handle_exception(GP_VECTOR, 0);
    write_ss(ss);
}
927 
928 /* Broken emulation causes triple fault, which skips the other tests. */
929 #if 0
930 static void test_lldt(volatile uint16_t *mem)
931 {
932     u64 gdt[] = { 0, /* null descriptor */
933 #ifdef __X86_64__
934 		  0, /* ldt descriptor is 16 bytes in long mode */
935 #endif
936 		  0x0000f82000000ffffull /* ldt descriptor */ };
937     struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
938 					    .base = (ulong)&gdt };
939     struct descriptor_table_ptr orig_gdt;
940 
941     cli();
942     sgdt(&orig_gdt);
943     lgdt(&gdt_ptr);
944     *mem = 0x8;
945     asm volatile("lldt %0" : : "m"(*mem));
946     lgdt(&orig_gdt);
947     sti();
948     report(sldt() == *mem, "lldt");
949 }
950 #endif
951 
/*
 * Reload the task register with its current selector: the busy bit in
 * the GDT descriptor must be cleared first (ltr faults on a busy TSS)
 * and must be set again by the ltr.
 */
static void test_ltr(volatile uint16_t *mem)
{
    struct descriptor_table_ptr gdt_ptr;
    uint64_t *gdt, *trp;
    uint16_t tr = str();
    uint64_t busy_mask = (uint64_t)1 << 41;	/* TSS busy bit in the descriptor */

    sgdt(&gdt_ptr);
    gdt = (uint64_t *)gdt_ptr.base;
    trp = &gdt[tr >> 3];
    *trp &= ~busy_mask;
    *mem = tr;
    asm volatile("ltr %0" : : "m"(*mem) : "memory");
    report(str() == tr && (*trp & busy_mask), "ltr");
}
967 
968 static void test_simplealu(u32 *mem)
969 {
970     *mem = 0x1234;
971     asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
972     report(*mem == 0x9235, "or");
973     asm("add %1, %0" : "+m"(*mem) : "r"(2));
974     report(*mem == 0x9237, "add");
975     asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
976     report(*mem == 0x8326, "xor");
977     asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
978     report(*mem == 0x8300, "sub");
979     asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
980     report(*mem == 0x8400, "adc(0)");
981     asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
982     report(*mem == 0x8501, "adc(0)");
983     asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
984     report(*mem == 0x8501, "sbb(0)");
985     asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
986     report(*mem == 0x8500, "sbb(1)");
987     asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
988     report(*mem == 0x8400, "and");
989     asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
990     report(*mem == 0x8400, "test");
991 }
992 
/* #UD handler for test_illegal_movbe: count the fault and resume at the
 * label placed just after the illegal encoding. */
static void illegal_movbe_handler(struct ex_regs *regs)
{
	extern char bad_movbe_cont;

	++exceptions;
	regs->rip = (ulong)&bad_movbe_cont;
}
1000 
/*
 * An illegal movbe encoding (register operand: 0f 38 f0 c0) must raise
 * exactly one #UD when the CPU supports MOVBE; skipped otherwise.
 */
static void test_illegal_movbe(void)
{
	if (!this_cpu_has(X86_FEATURE_MOVBE)) {
		report_skip("illegal movbe");
		return;
	}

	exceptions = 0;
	handle_exception(UD_VECTOR, illegal_movbe_handler);
	asm volatile(".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t"
		     " bad_movbe_cont:" : : : "rax");
	report(exceptions == 1, "illegal movbe");
	handle_exception(UD_VECTOR, 0);
}
1015 
/* #UD handler used during startup probing: if the forced-emulation
 * prefix faults, note it is unavailable and skip over the prefix. */
static void record_no_fep(struct ex_regs *regs)
{
	fep_available = 0;
	regs->rip += KVM_FEP_LENGTH;
}
1021 
/*
 * Entry point: probe for the forced-emulation prefix, map the MMIO test
 * page (twice, to enable cross-page tests), then run the full suite.
 */
int main(void)
{
	void *mem;
	void *insn_page;
	void *insn_ram;
	unsigned long t1, t2;

	setup_vm();
	/* Probe KVM_FEP: record_no_fep clears fep_available on #UD. */
	handle_exception(UD_VECTOR, record_no_fep);
	asm(KVM_FEP "nop");
	handle_exception(UD_VECTOR, 0);

	mem = alloc_vpages(2);
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
	// install the page twice to test cross-page mmio
	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
	insn_page = alloc_page();
	insn_ram = vmap(virt_to_phys(insn_page), 4096);

	// test mov reg, r/m and mov r/m, reg
	t1 = 0x123456789abcdef;
	asm volatile("mov %[t1], (%[mem]) \n\t"
		     "mov (%[mem]), %[t2]"
		     : [t2]"=r"(t2)
		     : [t1]"r"(t1), [mem]"r"(mem)
		     : "memory");
	report(t2 == 0x123456789abcdef, "mov reg, r/m (1)");

	test_simplealu(mem);
	test_cmps(mem);
	test_scas(mem);

	test_push(mem);
	test_pop(mem);

	test_xchg(mem);
	test_xadd(mem);

	test_cr8();

	test_smsw(mem);
	test_lmsw();
	test_ljmp(mem);
	test_stringio();
	test_incdecnotneg(mem);
	test_btc(mem);
	test_bsfbsr(mem);
	test_imul(mem);
	test_muldiv(mem);
	test_sse(mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_ram);
	test_shld_shrd(mem);
	//test_lgdt_lidt(mem);
	test_sreg(mem);
	//test_lldt(mem);
	test_ltr(mem);
	test_cmov(mem);

	/* These tests require the forced-emulation prefix. */
	if (fep_available) {
		test_mmx_movq_mf(mem);
		test_movabs(mem);
		test_smsw_reg(mem);
		test_nop(mem);
		test_mov_dr(mem);
	} else {
		report_skip("skipping register-only tests, "
			    "use kvm.forced_emulation_prefix=1 to enable");
	}

	test_push16(mem);
	test_crosspage_mmio(mem);

	test_string_io_mmio(mem);

	test_jmp_noncanonical(mem);
	test_illegal_movbe();

	return report_summary();
}
1102