OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #ifndef VM_HANDLES_IMPL_H_ | 5 #ifndef VM_HANDLES_IMPL_H_ |
6 #define VM_HANDLES_IMPL_H_ | 6 #define VM_HANDLES_IMPL_H_ |
7 | 7 |
| 8 #include "vm/heap.h" |
| 9 #include "vm/heap_trace.h" |
8 #include "vm/visitor.h" | 10 #include "vm/visitor.h" |
9 | 11 |
10 namespace dart { | 12 namespace dart { |
11 | 13 |
12 DECLARE_DEBUG_FLAG(bool, trace_handles_count); | 14 DECLARE_DEBUG_FLAG(bool, trace_handles_count); |
13 | 15 |
14 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 16 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
15 void Handles<kHandleSizeInWords, | 17 void Handles<kHandleSizeInWords, |
16 kHandlesPerChunk, | 18 kHandlesPerChunk, |
17 kOffsetOfRawPtr>::VisitObjectPointers( | 19 kOffsetOfRawPtr>::VisitObjectPointers( |
18 ObjectPointerVisitor* visitor) { | 20 ObjectPointerVisitor* visitor) { |
19 // Visit all zone handles. | 21 // Visit all zone handles. |
20 HandlesBlock* block = zone_blocks_; | 22 HandlesBlock* block = zone_blocks_; |
21 while (block != NULL) { | 23 while (block != NULL) { |
22 block->VisitObjectPointers(visitor); | 24 block->VisitObjectPointers(visitor); |
23 block = block->next_block(); | 25 block = block->next_block(); |
24 } | 26 } |
25 | 27 |
26 // Visit all scoped handles. | 28 // Visit all scoped handles. |
27 block = &first_scoped_block_; | 29 VisitScopedHandles(visitor); |
| 30 } |
| 31 |
| 32 |
| 33 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
| 34 void Handles<kHandleSizeInWords, |
| 35 kHandlesPerChunk, |
| 36 kOffsetOfRawPtr>::VisitScopedHandles( |
| 37 ObjectPointerVisitor* visitor) { |
| 38 HandlesBlock* block = &first_scoped_block_; |
28 do { | 39 do { |
29 block->VisitObjectPointers(visitor); | 40 block->VisitObjectPointers(visitor); |
30 block = block->next_block(); | 41 block = block->next_block(); |
31 } while (block != NULL); | 42 } while (block != NULL); |
32 } | 43 } |
33 | 44 |
34 | 45 |
// Incrementally visits scoped handles: only blocks (and block suffixes) added
// since the previous call are scanned, stopping at the first block that was
// already fully visited on an earlier pass.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords,
             kHandlesPerChunk,
             kOffsetOfRawPtr>::VisitUnvisitedScopedHandles(
    ObjectPointerVisitor* visitor) {
  HandlesBlock* block = &first_scoped_block_;
  while (block != NULL && block != last_visited_block_) {
    block->VisitUnvisitedObjectPointers(visitor);
    block = block->next_block();
  }
  // Leave last_visited_block_ pointing at first_scoped_block_'s successor:
  // handles are still being appended to first_scoped_block_, so on the next
  // call it may be "partially new" and must be rescanned (its per-block
  // cursor in VisitUnvisitedObjectPointers makes that a partial scan).
  last_visited_block_ = first_scoped_block_.next_block();
}
| 61 |
| 62 |
| 63 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
| 64 void Handles<kHandleSizeInWords, |
| 65 kHandlesPerChunk, |
38 kOffsetOfRawPtr>::Visit(HandleVisitor* visitor) { | 66 kOffsetOfRawPtr>::Visit(HandleVisitor* visitor) { |
39 // Visit all zone handles. | 67 // Visit all zone handles. |
40 HandlesBlock* block = zone_blocks_; | 68 HandlesBlock* block = zone_blocks_; |
41 while (block != NULL) { | 69 while (block != NULL) { |
42 block->Visit(visitor); | 70 block->Visit(visitor); |
43 block = block->next_block(); | 71 block = block->next_block(); |
44 } | 72 } |
45 | 73 |
46 // Visit all scoped handles. | 74 // Visit all scoped handles. |
47 block = &first_scoped_block_; | 75 block = &first_scoped_block_; |
(...skipping 28 matching lines...) Expand all Loading... |
76 // this appropriately. | 104 // this appropriately. |
77 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 105 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
78 uword Handles<kHandleSizeInWords, | 106 uword Handles<kHandleSizeInWords, |
79 kHandlesPerChunk, | 107 kHandlesPerChunk, |
80 kOffsetOfRawPtr>::AllocateZoneHandle(Isolate* isolate) { | 108 kOffsetOfRawPtr>::AllocateZoneHandle(Isolate* isolate) { |
81 ASSERT(isolate != NULL); | 109 ASSERT(isolate != NULL); |
82 ASSERT(isolate->current_zone() != NULL); | 110 ASSERT(isolate->current_zone() != NULL); |
83 ASSERT(isolate->no_handle_scope_depth() == 0); | 111 ASSERT(isolate->no_handle_scope_depth() == 0); |
84 Handles* handles = isolate->current_zone()->handles(); | 112 Handles* handles = isolate->current_zone()->handles(); |
85 ASSERT(handles != NULL); | 113 ASSERT(handles != NULL); |
86 return handles->AllocateHandleInZone(); | 114 uword address = handles->AllocateHandleInZone(); |
| 115 if (HeapTrace::is_enabled()) { |
| 116 uword zone_addr = reinterpret_cast<uword>(isolate->current_zone()); |
| 117 isolate->heap()->trace()->TraceAllocateZoneHandle(address, zone_addr); |
| 118 } |
| 119 return address; |
87 } | 120 } |
88 | 121 |
89 | 122 |
90 // Figure out the current zone using the current Isolate and | 123 // Figure out the current zone using the current Isolate and |
91 // check if the specified handle has been allocated in this zone. | 124 // check if the specified handle has been allocated in this zone. |
92 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 125 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
93 bool Handles<kHandleSizeInWords, | 126 bool Handles<kHandleSizeInWords, |
94 kHandlesPerChunk, | 127 kHandlesPerChunk, |
95 kOffsetOfRawPtr>::IsZoneHandle(uword handle) { | 128 kOffsetOfRawPtr>::IsZoneHandle(uword handle) { |
96 // TODO(5411412): Accessing the current isolate is a performance problem, | 129 // TODO(5411412): Accessing the current isolate is a performance problem, |
97 // consider passing it down as a parameter. | 130 // consider passing it down as a parameter. |
98 Isolate* isolate = Isolate::Current(); | 131 Isolate* isolate = Isolate::Current(); |
99 ASSERT(isolate != NULL); | 132 ASSERT(isolate != NULL); |
100 ASSERT(isolate->current_zone() != NULL); | 133 ASSERT(isolate->current_zone() != NULL); |
101 Handles* handles = isolate->current_zone()->handles(); | 134 Handles* handles = isolate->current_zone()->handles(); |
102 ASSERT(handles != NULL); | 135 ASSERT(handles != NULL); |
103 return handles->IsValidZoneHandle(handle); | 136 return handles->IsValidZoneHandle(handle); |
104 } | 137 } |
105 | 138 |
106 | 139 |
// Frees every zone and scoped handle block owned by this Handles structure,
// leaving only the embedded first scoped block (reinitialized) in place.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords,
             kHandlesPerChunk,
             kOffsetOfRawPtr>::DeleteAll() {
  // Delete all the zone allocated handle blocks.
  // GCTrace does not need to trace this call to DeleteHandleBlocks,
  // since the individual zone deletions will be caught
  // by instrumentation in the BaseZone destructor.
  DeleteHandleBlocks(zone_blocks_);
  zone_blocks_ = NULL;

  // Delete all the scoped handle blocks.
  // Do not trace if there is no current isolate. This can happen during
  // isolate shutdown.
  if (HeapTrace::is_enabled() && Isolate::Current() != NULL) {
    Isolate::Current()->heap()->trace()->TraceDeleteScopedHandles();
  }

  // Unlink the dynamically allocated tail of the scoped chain and free it;
  // the embedded first block is reset (not freed) and becomes the active
  // scoped block again.
  scoped_blocks_ = first_scoped_block_.next_block();
  DeleteHandleBlocks(scoped_blocks_);
  first_scoped_block_.ReInit();
  scoped_blocks_ = &first_scoped_block_;
}
121 | 164 |
122 | 165 |
123 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 166 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
124 void Handles<kHandleSizeInWords, | 167 void Handles<kHandleSizeInWords, |
125 kHandlesPerChunk, | 168 kHandlesPerChunk, |
(...skipping 169 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
295 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { | 338 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { |
296 visitor->VisitPointer( | 339 visitor->VisitPointer( |
297 reinterpret_cast<RawObject**>(&data_[i + kOffsetOfRawPtr/kWordSize])); | 340 reinterpret_cast<RawObject**>(&data_[i + kOffsetOfRawPtr/kWordSize])); |
298 } | 341 } |
299 } | 342 } |
300 | 343 |
301 | 344 |
302 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 345 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
303 void Handles<kHandleSizeInWords, | 346 void Handles<kHandleSizeInWords, |
304 kHandlesPerChunk, | 347 kHandlesPerChunk, |
| 348 kOffsetOfRawPtr>::HandlesBlock::VisitUnvisitedObjectPointers( |
| 349 ObjectPointerVisitor* visitor) { |
| 350 ASSERT(visitor != NULL); |
| 351 |
| 352 // last_visited_handle_ picks up where we were last time, |
| 353 // so there is nothing in the intialization position of this for loop. |
| 354 |
| 355 while (last_visited_handle_ < next_handle_slot_) { |
| 356 last_visited_handle_ += kHandleSizeInWords; |
| 357 uword* addr = &data_[last_visited_handle_ + kOffsetOfRawPtr / kWordSize]; |
| 358 visitor->VisitPointer(reinterpret_cast<RawObject**>(addr)); |
| 359 } |
| 360 } |
| 361 |
| 362 |
| 363 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
| 364 void Handles<kHandleSizeInWords, |
| 365 kHandlesPerChunk, |
305 kOffsetOfRawPtr>::HandlesBlock::Visit(HandleVisitor* visitor) { | 366 kOffsetOfRawPtr>::HandlesBlock::Visit(HandleVisitor* visitor) { |
306 ASSERT(visitor != NULL); | 367 ASSERT(visitor != NULL); |
307 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { | 368 for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) { |
308 visitor->VisitHandle(reinterpret_cast<uword>(&data_[i])); | 369 visitor->VisitHandle(reinterpret_cast<uword>(&data_[i])); |
309 } | 370 } |
310 } | 371 } |
311 | 372 |
312 | 373 |
313 #if defined(DEBUG) | 374 #if defined(DEBUG) |
314 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 375 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
(...skipping 13 matching lines...) Expand all Loading... |
328 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> | 389 template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr> |
329 int Handles<kHandleSizeInWords, | 390 int Handles<kHandleSizeInWords, |
330 kHandlesPerChunk, | 391 kHandlesPerChunk, |
331 kOffsetOfRawPtr>::HandlesBlock::HandleCount() const { | 392 kOffsetOfRawPtr>::HandlesBlock::HandleCount() const { |
332 return (next_handle_slot_ / kHandleSizeInWords); | 393 return (next_handle_slot_ / kHandleSizeInWords); |
333 } | 394 } |
334 | 395 |
335 } // namespace dart | 396 } // namespace dart |
336 | 397 |
337 #endif // VM_HANDLES_IMPL_H_ | 398 #endif // VM_HANDLES_IMPL_H_ |
OLD | NEW |