From: Andrew Cooper <andrew.cooper3@citrix.com>
Subject: x86/pv: Move exception injection into {,compat_}test_all_events()

This allows paths to jump straight to {,compat_}test_all_events() and have
injection of pending exceptions happen automatically, rather than requiring
all calling paths to handle exceptions themselves.

The normal exception path is simplified as a result, and
compat_post_handle_exception() is removed entirely.

This is part of XSA-260 / CVE-2018-8897.

Signed-off-by: Andrew Cooper <andrew.cooper3@citrix.com>
Reviewed-by: Jan Beulich <jbeulich@suse.com>
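
For orientation before the diff: a minimal, illustrative C-style sketch (not
actual Xen source) of what {,compat_}test_all_events() does after this change.
The field and helper names here (v->arch.pv.trap_bounce, softirq_pending(),
create_bounce_frame()) are adapted from the assembly labels below and are
assumptions for illustration only:

    /* Sketch only: the event-check loop, with the new trap-bounce step. */
    void test_all_events(struct vcpu *v)
    {
        for ( ;; )
        {
            local_irq_disable();                 /* cli */

            if ( softirq_pending(v->processor) )
            {
                local_irq_enable();              /* sti */
                do_softirq();
                continue;                        /* jmp test_all_events */
            }

            /*
             * New with this patch: inject a pending exception before the
             * MCE/NMI checks, so any caller may jump straight here.
             */
            struct trap_bounce *tb = &v->arch.pv.trap_bounce;
            if ( tb->flags & TBF_EXCEPTION )
            {
                local_irq_enable();              /* sti */
                create_bounce_frame(v);          /* .Lbounce_exception */
                tb->flags = 0;                   /* movb $0, TRAPBOUNCE_flags */
                continue;                        /* jmp test_all_events */
            }

            /* ... MCE and NMI processing, then return to the guest ... */
            return;
        }
    }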
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -39,6 +39,12 @@ ENTRY(compat_test_all_events)
         leaq  irq_stat+IRQSTAT_softirq_pending(%rip),%rcx
         cmpl  $0,(%rcx,%rax,1)
         jne   compat_process_softirqs
+
+        /* Inject exception if pending. */
+        lea   VCPU_trap_bounce(%rbx), %rdx
+        testb $TBF_EXCEPTION, TRAPBOUNCE_flags(%rdx)
+        jnz   .Lcompat_process_trapbounce
+
         testb $1,VCPU_mce_pending(%rbx)
         jnz   compat_process_mce
 .Lcompat_test_guest_nmi:
@@ -68,6 +74,15 @@ compat_process_softirqs:
         call  do_softirq
         jmp   compat_test_all_events
 
+        ALIGN
+/* %rbx: struct vcpu, %rdx: struct trap_bounce */
+.Lcompat_process_trapbounce:
+        sti
+.Lcompat_bounce_exception:
+        call  compat_create_bounce_frame
+        movb  $0, TRAPBOUNCE_flags(%rdx)
+        jmp   compat_test_all_events
+
         ALIGN
 /* %rbx: struct vcpu */
 compat_process_mce:
@@ -189,15 +204,6 @@ ENTRY(cr4_pv32_restore)
         xor   %eax, %eax
         ret
 
-/* %rdx: trap_bounce, %rbx: struct vcpu */
-ENTRY(compat_post_handle_exception)
-        testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
-        jz    compat_test_all_events
-.Lcompat_bounce_exception:
-        call  compat_create_bounce_frame
-        movb  $0,TRAPBOUNCE_flags(%rdx)
-        jmp   compat_test_all_events
-
 .section .text.entry, "ax", @progbits
 
 /* See lstar_enter for entry register state. */
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -42,6 +42,12 @@ test_all_events:
         leaq  irq_stat+IRQSTAT_softirq_pending(%rip), %rcx
         cmpl  $0, (%rcx, %rax, 1)
         jne   process_softirqs
+
+        /* Inject exception if pending. */
+        lea   VCPU_trap_bounce(%rbx), %rdx
+        testb $TBF_EXCEPTION, TRAPBOUNCE_flags(%rdx)
+        jnz   .Lprocess_trapbounce
+
         cmpb  $0, VCPU_mce_pending(%rbx)
         jne   process_mce
 .Ltest_guest_nmi:
@@ -70,6 +76,15 @@ process_softirqs:
         jmp   test_all_events
 
         ALIGN
+/* %rbx: struct vcpu, %rdx: struct trap_bounce */
+.Lprocess_trapbounce:
+        sti
+.Lbounce_exception:
+        call  create_bounce_frame
+        movb  $0, TRAPBOUNCE_flags(%rdx)
+        jmp   test_all_events
+
+        ALIGN
 /* %rbx: struct vcpu */
 process_mce:
         testb $1 << VCPU_TRAP_MCE, VCPU_async_exception_mask(%rbx)
@@ -667,15 +682,9 @@ handle_exception_saved:
         mov   %r15, STACK_CPUINFO_FIELD(xen_cr3)(%r14)
         testb $3,UREGS_cs(%rsp)
         jz    restore_all_xen
-        leaq  VCPU_trap_bounce(%rbx),%rdx
         movq  VCPU_domain(%rbx),%rax
         testb $1,DOMAIN_is_32bit_pv(%rax)
-        jnz   compat_post_handle_exception
-        testb $TBF_EXCEPTION,TRAPBOUNCE_flags(%rdx)
-        jz    test_all_events
-.Lbounce_exception:
-        call  create_bounce_frame
-        movb  $0,TRAPBOUNCE_flags(%rdx)
+        jnz   compat_test_all_events
         jmp   test_all_events
 
 /* No special register assumptions. */
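
The last hunk is where the promised simplification lands: the exception-return
path no longer inspects or clears the trap bounce itself. In the same
illustrative C style (names such as is_pv_32bit_domain() and currd are
assumptions here, not quoted Xen source), its tail reduces to:

    /* Sketch only: tail of the exception handler after this patch. */
    if ( is_pv_32bit_domain(currd) )   /* DOMAIN_is_32bit_pv */
        compat_test_all_events(v);     /* bounce now handled inside */
    else
        test_all_events(v);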