OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | |
2 // for details. All rights reserved. Use of this source code is governed by a | |
3 // BSD-style license that can be found in the LICENSE file. | |
4 | |
5 #include "vm/heap_trace.h" | |
6 | |
7 #include "include/dart_api.h" | |
8 #include "vm/dart_api_state.h" | |
9 #include "vm/debugger.h" | |
10 #include "vm/isolate.h" | |
11 #include "vm/object.h" | |
12 #include "vm/object_set.h" | |
13 #include "vm/object_store.h" | |
14 #include "vm/os.h" | |
15 #include "vm/stack_frame.h" | |
16 #include "vm/unicode.h" | |
17 | |
18 namespace dart { | |
19 | |
20 DEFINE_FLAG(bool, heap_trace, false, "Enable heap tracing."); | |
21 | |
22 Dart_FileOpenCallback HeapTrace::open_callback_ = NULL; | |
23 Dart_FileWriteCallback HeapTrace::write_callback_ = NULL; | |
24 Dart_FileCloseCallback HeapTrace::close_callback_ = NULL; | |
25 bool HeapTrace::is_enabled_ = false; | |
26 | |
27 class HeapTraceVisitor : public ObjectPointerVisitor { | |
28 public: | |
29 HeapTraceVisitor(Isolate* isolate, | |
30 HeapTrace* heap_trace, | |
31 ObjectSet* object_set) | |
32 : ObjectPointerVisitor(isolate), | |
33 heap_trace_(heap_trace), | |
34 vm_isolate_(Dart::vm_isolate()), | |
35 object_set_(object_set) { | |
36 } | |
37 | |
38 void VisitPointers(RawObject** first, RawObject** last) { | |
siva
2012/12/14 02:11:54
Can this be a const function? Here and other class
cshapiro
2012/12/15 19:56:49
Probably not, at least not without defining a cons
| |
39 for (RawObject** current = first; current <= last; current++) { | |
40 RawObject* raw_obj = *current; | |
41 | |
42 // We only care about objects in the heap.  Also, since this visitor | |
43 // will frequently encounter redundant roots, we use an object_set | |
44 // to skip the duplicates. | |
45 if (raw_obj->IsHeapObject() && | |
46 raw_obj != reinterpret_cast<RawObject*>(0x1) && | |
47 raw_obj != reinterpret_cast<RawObject*>(0xabababab) && | |
48 !object_set_->Contains(raw_obj) && | |
49 !vm_isolate_->heap()->Contains(RawObject::ToAddr(raw_obj))) { | |
50 object_set_->Add(raw_obj); | |
51 uword addr = RawObject::ToAddr(raw_obj); | |
52 heap_trace_->TraceSingleRoot(addr); | |
53 } | |
54 } | |
55 } | |
56 | |
57 private: | |
58 HeapTrace* heap_trace_; | |
59 Isolate* vm_isolate_; | |
60 // TODO(nricci): replace this with a map or something else sparse. | |
siva
2012/12/14 02:11:54
Do you want to retain Nathan's handle in the TODO
cshapiro
2012/12/15 19:56:49
Fixed.
| |
61 ObjectSet* object_set_; | |
siva
2012/12/14 02:11:54
Missing DISALLOW stuff.
cshapiro
2012/12/15 19:56:49
Done.
| |
62 }; | |
63 | |
64 | |
65 class HeapTraceScopedHandleVisitor : public ObjectPointerVisitor { | |
66 public: | |
67 HeapTraceScopedHandleVisitor(Isolate* isolate, HeapTrace* heap_trace) | |
68 : ObjectPointerVisitor(isolate), heap_trace_(heap_trace) { | |
69 } | |
70 | |
71 void VisitPointers(RawObject** first, RawObject** last) { | |
72 for (RawObject** current = first; current <= last; current++) { | |
73 RawObject* raw_obj = *current; | |
74 Heap* heap = isolate()->heap(); | |
75 | |
76 // We only care about objects in the heap | |
77 if (raw_obj->IsHeapObject() && | |
78 raw_obj != reinterpret_cast<RawObject*>(0x1) && | |
79 raw_obj != reinterpret_cast<RawObject*>(0xabababab) && | |
80 heap->Contains(RawObject::ToAddr(raw_obj))) { | |
81 uword addr = RawObject::ToAddr(raw_obj); | |
82 heap_trace_->TraceScopedHandle(addr); | |
83 } | |
84 } | |
85 } | |
86 | |
87 private: | |
88 HeapTrace* heap_trace_; | |
siva
2012/12/14 02:11:54
Ditto.
cshapiro
2012/12/15 19:56:49
Done.
| |
89 }; | |
90 | |
91 | |
92 class HeapTraceObjectStoreVisitor : public ObjectPointerVisitor { | |
93 public: | |
94 HeapTraceObjectStoreVisitor(Isolate* isolate, HeapTrace* heap_trace) | |
95 : ObjectPointerVisitor(isolate), heap_trace_(heap_trace) { | |
96 } | |
97 | |
98 void VisitPointers(RawObject** first, RawObject** last) { | |
99 for (RawObject** current = first; current <= last; current++) { | |
100 RawObject* raw_obj = *current; | |
101 | |
102 // We only care about objects in the heap | |
103 if (raw_obj->IsHeapObject() && | |
104 raw_obj != reinterpret_cast<RawObject*>(0x1) && | |
105 raw_obj != reinterpret_cast<RawObject*>(0xabababab)) { | |
106 uword addr = RawObject::ToAddr(raw_obj); | |
107 heap_trace_->TraceObjectStorePointer(addr); | |
108 } | |
109 } | |
110 } | |
111 | |
112 private: | |
113 HeapTrace* heap_trace_; | |
siva
2012/12/14 02:11:54
Ditto.
cshapiro
2012/12/15 19:56:49
Done.
| |
114 }; | |
115 | |
116 | |
117 class HeapTraceInitialHeapVisitor : public ObjectVisitor { | |
118 public: | |
119 HeapTraceInitialHeapVisitor(Isolate* isolate, HeapTrace* heap_trace) | |
120 : ObjectVisitor(isolate), heap_trace_(heap_trace) {} | |
121 | |
122 void VisitObject(RawObject* raw_obj) { | |
123 heap_trace_->TraceSnapshotAlloc(raw_obj, raw_obj->Size()); | |
124 } | |
125 | |
126 private: | |
127 HeapTrace* heap_trace_; | |
siva
2012/12/14 02:11:54
Ditto.
cshapiro
2012/12/15 19:56:49
Done.
| |
128 }; | |
129 | |
130 | |
131 HeapTrace::HeapTrace() : isolate_initialized_(false), output_stream_(NULL) { | |
132 } | |
133 | |
134 | |
135 HeapTrace::~HeapTrace() { | |
136 if (isolate_initialized_) { | |
137 (*close_callback_)(output_stream_); | |
138 } | |
139 } | |
140 | |
141 | |
142 void HeapTrace::InitOnce(Dart_FileOpenCallback open_callback, | |
143 Dart_FileWriteCallback write_callback, | |
144 Dart_FileCloseCallback close_callback) { | |
145 ASSERT(open_callback != NULL); | |
146 ASSERT(write_callback != NULL); | |
147 ASSERT(close_callback != NULL); | |
148 HeapTrace::open_callback_ = open_callback; | |
149 HeapTrace::write_callback_ = write_callback; | |
150 HeapTrace::close_callback_ = close_callback; | |
151 HeapTrace::is_enabled_ = true; | |
152 } | |
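
InitOnce() only records the embedder-supplied file callbacks and flips is_enabled_; it does not open anything itself. Below is a minimal sketch of stdio-backed callbacks, assuming signatures consistent with how the callbacks are invoked in this file (open_callback_ is called with just a file name and its result is later handed to close_callback_). The write callback is never called in this file, so its shape here is an assumption that should be checked against the Dart_File*Callback typedefs in include/dart_api.h; TraceFileOpen/TraceFileWrite/TraceFileClose are hypothetical names.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stdio-backed callbacks for HeapTrace::InitOnce().  The
// signatures are assumptions inferred from the call sites in this file and
// may need adjusting to match the Dart_File*Callback typedefs in dart_api.h.
static void* TraceFileOpen(const char* name) {
  return std::fopen(name, "wb");                      // opaque stream handle
}

static void TraceFileWrite(const void* data, intptr_t length, void* stream) {
  std::fwrite(data, 1, static_cast<std::size_t>(length),
              static_cast<std::FILE*>(stream));
}

static void TraceFileClose(void* stream) {
  std::fclose(static_cast<std::FILE*>(stream));       // flush and close trace
}

// Usage (before any isolate is created), assuming the typedefs match:
//   dart::HeapTrace::InitOnce(&TraceFileOpen, &TraceFileWrite, &TraceFileClose);
```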
153 | |
154 | |
155 ObjectSet* HeapTrace::CreateEmptyObjectSet() const { | |
156 Isolate* isolate = Isolate::Current(); | |
157 uword start, end; | |
158 isolate->heap()->StartEndAddress(&start, &end); | |
159 | |
160 Isolate* vm_isolate = Dart::vm_isolate(); | |
161 uword vm_start, vm_end; | |
162 vm_isolate->heap()->StartEndAddress(&vm_start, &vm_end); | |
163 | |
164 ObjectSet* allocated_set = new ObjectSet(Utils::Minimum(start, vm_start), | |
165 Utils::Maximum(end, vm_end)); | |
166 | |
167 return allocated_set; | |
168 } | |
169 | |
170 | |
171 void HeapTrace::ResizeObjectSet() { | |
172 Isolate* isolate = Isolate::Current(); | |
173 uword start, end; | |
174 isolate->heap()->StartEndAddress(&start, &end); | |
175 Isolate* vm_isolate = Dart::vm_isolate(); | |
176 uword vm_start, vm_end; | |
177 vm_isolate->heap()->StartEndAddress(&vm_start, &vm_end); | |
178 object_set_.Resize(Utils::Minimum(start, vm_start), | |
179 Utils::Maximum(end, vm_end)); | |
180 } | |
181 | |
182 | |
183 void HeapTrace::Init(Isolate* isolate) { | |
184 // Do not trace the VM isolate | |
185 if (isolate == Dart::vm_isolate()) { | |
186 return; | |
187 } | |
188 ASSERT(isolate_initialized_ == false); | |
189 const char* format = "%s.htrace"; | |
190 intptr_t len = OS::SNPrint(NULL, 0, format, isolate->name()); | |
191 char* filename = new char[len + 1]; | |
192 OS::SNPrint(filename, len + 1, format, isolate->name()); | |
193 output_stream_ = (*open_callback_)(filename); | |
194 ASSERT(output_stream_ != NULL); | |
195 delete[] filename; | |
196 isolate_initialized_ = true; | |
197 | |
198 HeapTraceObjectStoreVisitor object_store_visitor(isolate, this); | |
199 isolate->object_store()->VisitObjectPointers(&object_store_visitor); | |
200 | |
201 // Visit any objects that may have been allocated during startup, | |
202 // before we started tracing. | |
203 HeapTraceInitialHeapVisitor heap_visitor(isolate, this); | |
204 isolate->heap()->IterateObjects(&heap_visitor); | |
205 TraceRoots(isolate); | |
206 } | |
207 | |
208 | |
209 // Allocation Record - 'A' (0x41) | |
210 // | |
211 // Format: | |
212 // 'A' | |
213 // uword - address of allocated object | |
214 // uword - size of allocated object | |
215 void HeapTrace::TraceAllocation(uword addr, intptr_t size) { | |
216 if (isolate_initialized_) { | |
217 { | |
218 AllocationRecord rec(this); | |
219 rec.Write(addr); | |
220 rec.Write(size); | |
221 } | |
222 TraceRoots(Isolate::Current()); | |
223 } | |
224 } | |
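
For concreteness, this is the byte-level shape an 'A' record would take, assuming each record is its one-byte tag followed by the listed uwords in the VM's native width and byte order (the actual framing lives in the AllocationRecord helper in vm/heap_trace.h); the addresses below are made up.

```cpp
// Hypothetical TraceAllocation(0xf6401230, 16) on a 32-bit, little-endian
// build, under the tag-byte-plus-raw-uwords assumption:
//
//   0x41                    // tag 'A'
//   0x30 0x12 0x40 0xf6     // uword addr = 0xf6401230
//   0x10 0x00 0x00 0x00     // uword size = 16
//
// The TraceRoots() call that follows may then append 'R' (root) records for
// any roots not already present in object_set_.
```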
225 | |
226 | |
227 // Snapshot Allocation Record - 'B' (0x42) | |
228 // | |
229 // Format: | |
230 // 'B' | |
231 // uword - address of allocated object | |
232 // uword - size of allocated object | |
233 void HeapTrace::TraceSnapshotAlloc(RawObject* obj, intptr_t size) { | |
234 if (isolate_initialized_) { | |
235 SnapshotAllocationRecord rec(this); | |
236 rec.Write(RawObject::ToAddr(obj)); | |
237 rec.Write(static_cast<uword>(size)); | |
238 } | |
239 } | |
240 | |
241 | |
242 // Allocate Zone Handle Record - 'Z' (0x5a) | |
243 // | |
244 // Format: | |
245 // 'Z' | |
246 // uword - handle address (where the handle is pointing) | |
247 // uword - zone address (address of the zone the handle is in) | |
248 void HeapTrace::TraceAllocateZoneHandle(uword handle, uword zone_addr) { | |
249 if (isolate_initialized_) { | |
250 AllocZoneHandleRecord rec(this); | |
251 rec.Write(handle); | |
252 rec.Write(zone_addr); | |
253 } | |
254 } | |
255 | |
256 | |
257 // Delete Zone Record - 'z' (0x7a) | |
258 // | |
259 // Format: | |
260 // 'z' | |
261 // uword - zone address (all the handles in that zone are now gone) | |
262 void HeapTrace::TraceDeleteZone(Zone* zone) { | |
263 if (isolate_initialized_) { | |
264 DeleteZoneRecord rec(this); | |
265 rec.Write(reinterpret_cast<uword>(zone)); | |
266 } | |
267 } | |
268 | |
269 | |
270 // Delete Scoped Handles Record - 's' (0x73) | |
271 // | |
272 // Format: | |
273 // 's' | |
274 void HeapTrace::TraceDeleteScopedHandles() { | |
275 if (isolate_initialized_) { | |
276 DeleteScopedHandlesRecord rec(this); | |
277 } | |
278 } | |
279 | |
280 | |
281 // Copy Record - 'C' (0x43) | |
282 // | |
283 // Format: | |
284 // 'C' | |
285 // uword - old address | |
286 // uword - new address | |
287 void HeapTrace::TraceCopy(uword from_addr, uword to_addr) { | |
288 if (isolate_initialized_) { | |
289 CopyRecord rec(this); | |
290 rec.Write(from_addr); | |
291 rec.Write(to_addr); | |
292 } | |
293 } | |
294 | |
295 | |
296 // Object Store Record - 'O' (0x4f) | |
297 // | |
298 // Format: | |
299 // 'O' | |
300 // uword - address | |
301 void HeapTrace::TraceObjectStorePointer(uword addr) { | |
302 if (isolate_initialized_) { | |
303 ObjectStoreRecord rec(this); | |
304 rec.Write(addr); | |
305 } | |
306 } | |
307 | |
308 | |
309 // Promotion Record - 'P' (0x50) | |
310 // | |
311 // Format: | |
312 // 'P' | |
313 // uword - old address | |
314 // uword - new address | |
315 void HeapTrace::TracePromotion(uword old_addr, uword promoted_addr) { | |
316 if (isolate_initialized_) { | |
317 PromotionRecord rec(this); | |
318 rec.Write(old_addr); | |
319 rec.Write(promoted_addr); | |
320 } | |
321 } | |
322 | |
323 | |
324 // Death Range Record - 'L' (0x4c) | |
325 // | |
326 // Format: | |
327 // 'L' | |
328 // uword - inclusive start address of the space being left | |
329 // uword - exclusive end address of the space being left | |
330 void HeapTrace::TraceDeathRange(uword inclusive_start, uword exclusive_end) { | |
331 if (isolate_initialized_) { | |
332 DeathRangeRecord rec(this); | |
333 rec.Write(inclusive_start); | |
334 rec.Write(exclusive_end); | |
335 } | |
336 } | |
337 | |
338 | |
339 // Register Class Record - 'K' (0x4b) | |
340 // | |
341 // Format: | |
342 // 'K' | |
343 // uword - address (the address of the class) | |
344 void HeapTrace::TraceRegisterClass(const Class& cls) { | |
345 if (isolate_initialized_) { | |
346 RegisterClassRecord rec(this); | |
347 rec.Write(RawObject::ToAddr(cls.raw())); | |
348 } | |
349 } | |
350 | |
351 | |
352 // Scoped Handle Record - 'H' (0x48) | |
353 // | |
354 // Format: | |
355 // 'H' | |
356 // uword - address of the scoped handle (where it is pointing) | |
357 void HeapTrace::TraceScopedHandle(uword handle) { | |
358 if (isolate_initialized_) { | |
359 AllocScopedHandleRecord rec(this); | |
360 rec.Write(handle); | |
361 } | |
362 } | |
363 | |
364 | |
365 // Root Record - 'R' (0x52) | |
366 // | |
367 // Format: | |
368 // 'R' | |
369 // uword - address | |
370 void HeapTrace::TraceSingleRoot(uword root_addr) { | |
371 if (isolate_initialized_) { | |
372 RootRecord rec(this); | |
373 rec.Write(root_addr); | |
374 } | |
375 } | |
376 | |
377 | |
378 // Sweep Record - 'S' (0x53) | |
379 // | |
380 // Format: | |
381 // 'S' | |
382 // uword - address | |
383 void HeapTrace::TraceSweep(uword swept_addr) { | |
384 if (isolate_initialized_) { | |
385 SweepRecord rec(this); | |
386 rec.Write(swept_addr); | |
387 } | |
388 } | |
389 | |
390 | |
391 // Does not output any records directly, but calls TraceSingleRoot, | |
392 // which may emit Root ('R') records for newly seen roots. | |
393 void HeapTrace::TraceRoots(Isolate* isolate) { | |
394 if (isolate_initialized_) { | |
395 ResizeObjectSet(); // TODO(nricci): map or something? | |
396 HeapTraceVisitor visitor(isolate, this, &object_set_); | |
397 HeapTraceScopedHandleVisitor handle_visitor(isolate, this); | |
398 | |
399 bool visit_prologue_weak_handles = true; | |
400 bool validate_frames = false; | |
401 | |
402 // Visit objects in per isolate stubs. | |
403 StubCode::VisitObjectPointers(&visitor); | |
404 | |
405 // stack | |
406 StackFrameIterator frames_iterator(validate_frames); | |
407 StackFrame* frame = frames_iterator.NextFrame(); | |
408 while (frame != NULL) { | |
409 frame->VisitObjectPointers(&visitor); | |
410 frame = frames_iterator.NextFrame(); | |
411 } | |
412 | |
413 if (isolate->api_state() != NULL) { | |
414 isolate->api_state()->VisitObjectPointers(&visitor, | |
415 visit_prologue_weak_handles); | |
416 } | |
417 | |
418 // Visit the top context which is stored in the isolate. | |
419 RawContext* top_context = isolate->top_context(); | |
420 visitor.VisitPointer(reinterpret_cast<RawObject**>(&top_context)); | |
421 | |
422 // Visit the currently active IC data array. | |
423 RawArray* ic_data_array = isolate->ic_data_array(); | |
424 visitor.VisitPointer(reinterpret_cast<RawObject**>(&ic_data_array)); | |
425 | |
426 // Visit objects in the debugger. | |
427 isolate->debugger()->VisitObjectPointers(&visitor); | |
428 | |
429 isolate->current_zone()->handles()-> | |
430 VisitUnvisitedScopedHandles(&handle_visitor); | |
431 | |
432 object_set_.FastClear(); | |
433 } | |
434 } | |
435 | |
436 | |
437 // Store Record - 'U' (0x55) | |
438 // | |
439 // Format: | |
440 // 'U' | |
441 // uword - originating object address (where a pointer is being stored) | |
442 // uword - byte offset into origin where the pointer is being stored | |
443 // uword - value of the pointer being stored | |
444 void HeapTrace::TraceStoreIntoObject(uword object, | |
445 uword field_addr, | |
446 uword value) { | |
447 if (isolate_initialized_) { | |
448 // We don't care about pointers into the VM isolate heap, so skip them. | |
449 // There should not be any pointers /out/ of the VM isolate, so we | |
450 // do not check the originating object. | |
451 if (Isolate::Current()->heap()->Contains(value)) { | |
452 StoreRecord rec(this); | |
453 uword slot_offset = field_addr - object; | |
454 | |
455 rec.Write(object); | |
456 rec.Write(slot_offset); | |
457 rec.Write(value); | |
458 } | |
459 } | |
460 } | |
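
A small worked example of the slot offset written by the 'U' record above, with made-up addresses: it is simply the byte distance from the start of the origin object to the slot being stored into.

```cpp
#include <cstdint>
typedef uintptr_t uword;  // stand-in for the VM's uword

// Hypothetical addresses on a 32-bit build:
const uword object      = 0x10000500;            // start of the origin object
const uword field_addr  = 0x1000050c;            // slot being written into
const uword value       = 0x10000abc;            // heap pointer being stored
const uword slot_offset = field_addr - object;   // 0xc: the slot is 12 bytes in
// The resulting record is: 'U', 0x10000500, 0x0000000c, 0x10000abc.
```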
461 | |
462 | |
463 // Mark Sweep Start Record - '{' (0x7b) | |
464 // | |
465 // Format: | |
466 // '{' | |
467 void HeapTrace::TraceMarkSweepStart() { | |
468 if (isolate_initialized_) { | |
469 MarkSweepStartRecord rec(this); | |
470 } | |
471 } | |
472 | |
473 | |
474 // Mark Sweep Finish Record - '}' (0x7d) | |
475 // | |
476 // Format: | |
477 // '}' | |
478 void HeapTrace::TraceMarkSweepFinish() { | |
479 if (isolate_initialized_) { | |
480 MarkSweepFinishRecord rec(this); | |
481 } | |
482 } | |
483 | |
484 } // namespace dart | |
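
Taken together, the record comments in this file describe a simple tagged stream. Below is a minimal, standalone reader sketch based only on those comments; it assumes each record is its one-byte tag followed by the listed uword fields written in the tracing VM's native width and byte order (32-bit little-endian here). The real framing is defined by the *Record helpers in vm/heap_trace.h, so verify against that before relying on this.

```cpp
#include <cinttypes>
#include <cstdint>
#include <cstdio>

typedef uint32_t uword;  // assumption: trace written by a 32-bit VM

// Number of uword fields that follow each tag, per the comments above.
static int FieldCount(int tag) {
  switch (tag) {
    case 'A': case 'B': case 'Z': case 'C':
    case 'P': case 'L':                      return 2;   // two uwords
    case 'U':                                return 3;   // object, offset, value
    case 'z': case 'O': case 'K': case 'H':
    case 'R': case 'S':                      return 1;   // single uword
    case 's': case '{': case '}':            return 0;   // tag only
    default:                                 return -1;  // unknown tag
  }
}

int main(int argc, char** argv) {
  if (argc != 2) {
    std::fprintf(stderr, "usage: %s <isolate>.htrace\n", argv[0]);
    return 1;
  }
  std::FILE* in = std::fopen(argv[1], "rb");
  if (in == NULL) {
    std::fprintf(stderr, "cannot open %s\n", argv[1]);
    return 1;
  }
  int tag;
  while ((tag = std::fgetc(in)) != EOF) {
    int count = FieldCount(tag);
    if (count < 0) {
      std::fprintf(stderr, "unknown record tag 0x%02x\n", tag);
      break;
    }
    uword fields[3] = {0, 0, 0};
    if (count > 0 &&
        std::fread(fields, sizeof(uword), count, in) !=
            static_cast<std::size_t>(count)) {
      std::fprintf(stderr, "truncated 0x%02x record\n", tag);
      break;
    }
    std::printf("%c", tag);
    for (int i = 0; i < count; i++) {
      std::printf(" 0x%08" PRIx32, fields[i]);
    }
    std::printf("\n");
  }
  std::fclose(in);
  return 0;
}
```

Running it over an isolate's .htrace file dumps one decoded record per line, which is usually enough to sanity-check the writer side.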