OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 118 matching lines...)
129 Isolate* isolate); | 129 Isolate* isolate); |
130 | 130 |
131 static void RecordWriteForEvacuationFromCode(HeapObject* obj, | 131 static void RecordWriteForEvacuationFromCode(HeapObject* obj, |
132 Object** slot, | 132 Object** slot, |
133 Isolate* isolate); | 133 Isolate* isolate); |
134 | 134 |
135 // Record a slot for compaction. Returns false for objects that are | 135 // Record a slot for compaction. Returns false for objects that are |
136 // guaranteed to be rescanned or not guaranteed to survive. | 136 // guaranteed to be rescanned or not guaranteed to survive. |
137 // | 137 // |
138 // No slots in white objects should be recorded, as some slots are typed and | 138 // No slots in white objects should be recorded, as some slots are typed and |
139 // cannot be interpreted corrrectly if the underlying object does not survive | 139 // cannot be interpreted correctly if the underlying object does not survive |
140 // the incremental cycle (stays white). | 140 // the incremental cycle (stays white). |
141 INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value)); | 141 INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value)); |
142 INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value)); | 142 INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value)); |
143 INLINE(void RecordWriteIntoCode(HeapObject* obj, | 143 INLINE(void RecordWriteIntoCode(HeapObject* obj, |
144 RelocInfo* rinfo, | 144 RelocInfo* rinfo, |
145 Object* value)); | 145 Object* value)); |
146 INLINE(void RecordWriteOfCodeEntry(JSFunction* host, | 146 INLINE(void RecordWriteOfCodeEntry(JSFunction* host, |
147 Object** slot, | 147 Object** slot, |
148 Code* value)); | 148 Code* value)); |
149 | 149 |
(...skipping 130 matching lines...)
280 intptr_t allocated_; | 280 intptr_t allocated_; |
281 | 281 |
282 int no_marking_scope_depth_; | 282 int no_marking_scope_depth_; |
283 | 283 |
284 DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking); | 284 DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking); |
285 }; | 285 }; |
286 | 286 |
287 } } // namespace v8::internal | 287 } } // namespace v8::internal |
288 | 288 |
289 #endif // V8_INCREMENTAL_MARKING_H_ | 289 #endif // V8_INCREMENTAL_MARKING_H_ |