OLD | NEW |
1 // Copyright 2015 The Crashpad Authors. All rights reserved. | 1 // Copyright 2015 The Crashpad Authors. All rights reserved. |
2 // | 2 // |
3 // Licensed under the Apache License, Version 2.0 (the "License"); | 3 // Licensed under the Apache License, Version 2.0 (the "License"); |
4 // you may not use this file except in compliance with the License. | 4 // you may not use this file except in compliance with the License. |
5 // You may obtain a copy of the License at | 5 // You may obtain a copy of the License at |
6 // | 6 // |
7 // http://www.apache.org/licenses/LICENSE-2.0 | 7 // http://www.apache.org/licenses/LICENSE-2.0 |
8 // | 8 // |
9 // Unless required by applicable law or agreed to in writing, software | 9 // Unless required by applicable law or agreed to in writing, software |
10 // distributed under the License is distributed on an "AS IS" BASIS, | 10 // distributed under the License is distributed on an "AS IS" BASIS, |
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 // See the License for the specific language governing permissions and | 12 // See the License for the specific language governing permissions and |
13 // limitations under the License. | 13 // limitations under the License. |
14 | 14 |
15 #include "snapshot/win/cpu_context_win.h" | 15 #include "snapshot/win/cpu_context_win.h" |
16 | 16 |
17 #include <stdint.h> | 17 #include <stdint.h> |
18 #include <string.h> | 18 #include <string.h> |
19 | 19 |
20 #include "base/logging.h" | 20 #include "base/logging.h" |
21 #include "snapshot/cpu_context.h" | 21 #include "snapshot/cpu_context.h" |
22 | 22 |
23 namespace crashpad { | 23 namespace crashpad { |
24 | 24 |
25 namespace { | 25 namespace { |
26 | 26 |
// Validation for casts used with CPUContextX86::FsaveToFxsave(): the callers
// below reinterpret_cast a Windows floating-save structure to
// CPUContextX86::Fsave, which is only sound if the layouts agree byte for
// byte. The WOW64_FLOATING_SAVE_AREA fsave payload is everything up to (but
// not including) its Cr0NpxState member.
static_assert(sizeof(CPUContextX86::Fsave) ==
              offsetof(WOW64_FLOATING_SAVE_AREA, Cr0NpxState),
              "WoW64 fsave types must be equivalent");
#if defined(ARCH_CPU_X86)
// The native x86 FLOATING_SAVE_AREA’s fsave payload ends at Spare0; it can
// only be checked when targeting 32-bit x86, where the type exists.
static_assert(sizeof(CPUContextX86::Fsave) ==
              offsetof(FLOATING_SAVE_AREA, Spare0),
              "fsave types must be equivalent");
#endif  // ARCH_CPU_X86
// Returns true when the ContextFlags field of |context| indicates that every
// flag in |bits| was captured, i.e. the corresponding portion of the thread
// context (control, integer, segments, …) is present and safe to read.
template <typename T>
bool HasContextPart(const T& context, uint32_t bits) {
  // Equivalent to (ContextFlags & bits) == bits: the part is present exactly
  // when no requested bit is missing from ContextFlags.
  return (bits & ~context.ContextFlags) == 0;
}
// Converts a Windows x86 thread context to a CPUContextX86. T is either
// CONTEXT (when targeting 32-bit x86) or WOW64_CONTEXT (when reading a WOW64
// thread from a 64-bit process); the two are expected to be layout- and
// flag-compatible. |out| is fully zeroed first, then only the portions that
// ContextFlags marks as captured are copied.
template <class T>
void CommonInitializeX86Context(const T& context, CPUContextX86* out) {
  // This function assumes that the WOW64_CONTEXT_* and x86 CONTEXT_* values
  // for ContextFlags are identical. This can be tested when targeting 32-bit
  // x86.
#if defined(ARCH_CPU_X86)
  static_assert(sizeof(CONTEXT) == sizeof(WOW64_CONTEXT),
                "type mismatch: CONTEXT");
// Each expansion is an empty runtime statement carrying a compile-time check
// that the native CONTEXT_* flag equals its WOW64_* counterpart.
#define ASSERT_WOW64_EQUIVALENT(x) \
  do { \
    static_assert(x == WOW64_##x, "value mismatch: " #x); \
  } while (false)
  ASSERT_WOW64_EQUIVALENT(CONTEXT_i386);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_i486);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_CONTROL);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_INTEGER);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_SEGMENTS);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_FLOATING_POINT);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_DEBUG_REGISTERS);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_EXTENDED_REGISTERS);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_FULL);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_ALL);
  ASSERT_WOW64_EQUIVALENT(CONTEXT_XSTATE);
#undef ASSERT_WOW64_EQUIVALENT
#endif  // ARCH_CPU_X86

  memset(out, 0, sizeof(*out));

  // A context without the i386 flag is unexpected but not fatal: log and
  // proceed, copying whatever parts are marked present.
  LOG_IF(ERROR, !HasContextPart(context, WOW64_CONTEXT_i386))
      << "non-x86 context";

  if (HasContextPart(context, WOW64_CONTEXT_CONTROL)) {
    out->ebp = context.Ebp;
    out->eip = context.Eip;
    out->cs = static_cast<uint16_t>(context.SegCs);
    out->eflags = context.EFlags;
    out->esp = context.Esp;
    out->ss = static_cast<uint16_t>(context.SegSs);
  }

  if (HasContextPart(context, WOW64_CONTEXT_INTEGER)) {
    out->eax = context.Eax;
    out->ebx = context.Ebx;
    out->ecx = context.Ecx;
    out->edx = context.Edx;
    out->edi = context.Edi;
    out->esi = context.Esi;
  }

  if (HasContextPart(context, WOW64_CONTEXT_SEGMENTS)) {
    out->ds = static_cast<uint16_t>(context.SegDs);
    out->es = static_cast<uint16_t>(context.SegEs);
    out->fs = static_cast<uint16_t>(context.SegFs);
    out->gs = static_cast<uint16_t>(context.SegGs);
  }

  if (HasContextPart(context, WOW64_CONTEXT_DEBUG_REGISTERS)) {
    out->dr0 = context.Dr0;
    out->dr1 = context.Dr1;
    out->dr2 = context.Dr2;
    out->dr3 = context.Dr3;

    // DR4 and DR5 are obsolete synonyms for DR6 and DR7, see
    // https://en.wikipedia.org/wiki/X86_debug_register.
    out->dr4 = context.Dr6;
    out->dr5 = context.Dr7;

    out->dr6 = context.Dr6;
    out->dr7 = context.Dr7;
  }

  // Prefer the full fxsave image when present; otherwise fall back to
  // synthesizing one from the legacy x87 fsave data.
  if (HasContextPart(context, WOW64_CONTEXT_EXTENDED_REGISTERS)) {
    static_assert(sizeof(out->fxsave) == sizeof(context.ExtendedRegisters),
                  "fxsave types must be equivalent");
    memcpy(&out->fxsave, &context.ExtendedRegisters, sizeof(out->fxsave));
  } else if (HasContextPart(context, WOW64_CONTEXT_FLOATING_POINT)) {
    // The static_assert that validates this cast can't be here because it
    // relies on field names that vary based on the template parameter. It
    // lives at namespace scope above instead.
    CPUContextX86::FsaveToFxsave(
        *reinterpret_cast<const CPUContextX86::Fsave*>(&context.FloatSave),
        &out->fxsave);
  }
}
82 | 125 |
83 } // namespace | 126 } // namespace |
84 | 127 |
85 #if defined(ARCH_CPU_64_BITS) | 128 #if defined(ARCH_CPU_64_BITS) |
86 | 129 |
// On 64-bit Windows, an x86 thread's state arrives as a WOW64_CONTEXT;
// delegate to the shared converter, which handles both context flavors.
void InitializeX86Context(const WOW64_CONTEXT& context, CPUContextX86* out) {
  CommonInitializeX86Context(context, out);
}
90 | 133 |
// Converts a native x86_64 Windows CONTEXT to a CPUContextX86_64. |out| is
// fully zeroed first, then only the portions that ContextFlags marks as
// captured are copied.
void InitializeX64Context(const CONTEXT& context, CPUContextX86_64* out) {
  memset(out, 0, sizeof(*out));

  // Unexpected architecture is logged but not fatal; present parts are
  // still copied below.
  LOG_IF(ERROR, !HasContextPart(context, CONTEXT_AMD64)) << "non-x64 context";

  if (HasContextPart(context, CONTEXT_CONTROL)) {
    out->cs = context.SegCs;
    out->rflags = context.EFlags;
    out->rip = context.Rip;
    out->rsp = context.Rsp;
    // SegSs ignored.
  }

  if (HasContextPart(context, CONTEXT_INTEGER)) {
    out->rax = context.Rax;
    out->rbx = context.Rbx;
    out->rcx = context.Rcx;
    out->rdx = context.Rdx;
    out->rdi = context.Rdi;
    out->rsi = context.Rsi;
    out->rbp = context.Rbp;
    out->r8 = context.R8;
    out->r9 = context.R9;
    out->r10 = context.R10;
    out->r11 = context.R11;
    out->r12 = context.R12;
    out->r13 = context.R13;
    out->r14 = context.R14;
    out->r15 = context.R15;
  }

  if (HasContextPart(context, CONTEXT_SEGMENTS)) {
    out->fs = context.SegFs;
    out->gs = context.SegGs;
    // SegDs ignored.
    // SegEs ignored.
  }

  if (HasContextPart(context, CONTEXT_DEBUG_REGISTERS)) {
    out->dr0 = context.Dr0;
    out->dr1 = context.Dr1;
    out->dr2 = context.Dr2;
    out->dr3 = context.Dr3;

    // DR4 and DR5 are obsolete synonyms for DR6 and DR7, see
    // https://en.wikipedia.org/wiki/X86_debug_register.
    out->dr4 = context.Dr6;
    out->dr5 = context.Dr7;

    out->dr6 = context.Dr6;
    out->dr7 = context.Dr7;
  }

  if (HasContextPart(context, CONTEXT_FLOATING_POINT)) {
    static_assert(sizeof(out->fxsave) == sizeof(context.FltSave),
                  "types must be equivalent");
    // Copy from the start of FltSave (not an interior member), relying on
    // the size equivalence asserted above.
    memcpy(&out->fxsave, &context.FltSave, sizeof(out->fxsave));
  }
}
148 | 193 |
149 #else // ARCH_CPU_64_BITS | 194 #else // ARCH_CPU_64_BITS |
150 | 195 |
// When targeting 32-bit x86, the native CONTEXT is itself an x86 context;
// delegate to the shared converter, which handles both context flavors.
void InitializeX86Context(const CONTEXT& context, CPUContextX86* out) {
  CommonInitializeX86Context(context, out);
}
154 | 199 |
155 #endif // ARCH_CPU_64_BITS | 200 #endif // ARCH_CPU_64_BITS |
156 | 201 |
157 } // namespace crashpad | 202 } // namespace crashpad |
OLD | NEW |