Chromium Code Reviews

Unified Diff: src/mark-compact.cc

Issue 11575007: Make embedded maps in optimized code weak. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years ago
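
Overview of the change below: maps embedded in optimized code become weak. Each Map carries a dependent-codes list of the optimized Code objects that embed it. While clearing non-live transitions, the collector compacts that list if the map survived, or flags every dependent Code object for deoptimization if the map died; MarkCompactCollector::Finish() then deoptimizes all functions whose code was flagged, using the new DeoptimizeMarkedCodeFilter. What follows is a minimal, self-contained toy model of the two per-map passes; every type and name in it is invented for illustration and it is not part of the patch.

// Toy model of the patch's dependent-code handling (illustration only; these
// types are invented and merely mirror the shape of the real change).
#include <cstdio>
#include <vector>

struct ToyCode {
  bool is_live;                    // survived the marking phase
  bool marked_for_deoptimization;  // picked up later by the deoptimizer
};

// Map died: flag every still-live dependent code for deoptimization and drop
// the list (mirrors the new DeoptimizeLiveDependentCodes).
void DeoptimizeLiveDependentCodes(std::vector<ToyCode*>* codes) {
  for (ToyCode* code : *codes) {
    if (code->is_live) code->marked_for_deoptimization = true;
  }
  codes->clear();
}

// Map survived: compact the list, keeping only codes that are still live and
// not already condemned (mirrors the new ClearNonLiveDependentCodes).
void ClearNonLiveDependentCodes(std::vector<ToyCode*>* codes) {
  size_t kept = 0;
  for (ToyCode* code : *codes) {
    if (code->is_live && !code->marked_for_deoptimization) {
      (*codes)[kept++] = code;
    }
  }
  codes->resize(kept);
}

int main() {
  ToyCode a = {true, false}, b = {false, false}, c = {true, false};

  std::vector<ToyCode*> live_map_codes = {&a, &b, &c};
  ClearNonLiveDependentCodes(&live_map_codes);
  std::printf("surviving map keeps %zu of 3 dependent codes\n",
              live_map_codes.size());  // prints 2

  std::vector<ToyCode*> dead_map_codes = {&a, &c};
  DeoptimizeLiveDependentCodes(&dead_map_codes);
  std::printf("dead map: 'a' marked for deoptimization: %d\n",
              a.marked_for_deoptimization);  // prints 1
  return 0;
}
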
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 806 matching lines...)
     space->PrepareForMarkCompact();
   }

 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
 }

+class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
+ public:
+  virtual bool TakeFunction(JSFunction* function) {
+    return function->code()->marked_for_deoptimization();
+  }
+};
+

 void MarkCompactCollector::Finish() {
 #ifdef DEBUG
   ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
   state_ = IDLE;
 #endif
   // The stub cache is not traversed during GC; clear the cache to
   // force lazy re-initialization of it. This must be done after the
   // GC, because it relies on the new address of certain old space
   // objects (empty string, illegal builtin).
   heap()->isolate()->stub_cache()->Clear();
+
+  DeoptimizeMarkedCodeFilter filter;
+  Deoptimizer::DeoptimizeAllFunctionsWith(&filter);
 }


 // -------------------------------------------------------------------------
 // Phase 1: tracing and marking live objects.
 //   before: all objects are in normal state.
 //   after: a live object's map pointer is marked as '00'.

 // Marking all live objects in the heap as part of mark-sweep or mark-compact
 // collection. Before marking, all objects are in their normal state. After
(...skipping 1285 matching lines...)
   // Iterate over the map space, setting map transitions that go from
   // a marked map to an unmarked map to null transitions. This action
   // is carried out only on maps of JSObjects and related subtypes.
   for (HeapObject* obj = map_iterator.Next();
        obj != NULL; obj = map_iterator.Next()) {
     Map* map = reinterpret_cast<Map*>(obj);
     MarkBit map_mark = Marking::MarkBitFrom(map);
     if (map->IsFreeSpace()) continue;

     ASSERT(map->IsMap());
-    // Only JSObject and subtypes have map transitions and back pointers.
-    STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
-    if (map->instance_type() < FIRST_JS_OBJECT_TYPE) continue;
+    if (!map->CanTransition()) continue;

     if (map_mark.Get() &&
         map->attached_to_shared_function_info()) {
       // This map is used for inobject slack tracking and has been detached
       // from SharedFunctionInfo during the mark phase.
       // Since it survived the GC, reattach it now.
       map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
     }

     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);
+
+    if (map_mark.Get()) {
+      ClearNonLiveDependentCodes(map);
+    } else {
+      DeoptimizeLiveDependentCodes(map);
+    }
   }
 }


 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   int number_of_transitions = map->NumberOfProtoTransitions();
   FixedArray* prototype_transitions = map->GetPrototypeTransitions();

   int new_number_of_transitions = 0;
   const int header = Map::kProtoTransitionHeaderSize;
(...skipping 48 matching lines...)
   // Follow back pointer, check whether we are dealing with a map transition
   // from a live map to a dead path and in case clear transitions of parent.
   bool current_is_alive = map_mark.Get();
   bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
   if (!current_is_alive && parent_is_alive) {
     parent->ClearNonLiveTransitions(heap());
   }
 }


+void MarkCompactCollector::DeoptimizeLiveDependentCodes(Map* map) {
+  AssertNoAllocation no_allocation_scope;
+  FixedArray* codes = map->dependent_codes();
+  int number_of_codes = DependentCodes::number_of_codes(codes);
+  if (number_of_codes == 0) return;
+  for (int i = 0; i < number_of_codes; i++) {
+    Code* code = DependentCodes::code(codes, i);
+    if (IsMarked(code) && !code->marked_for_deoptimization()) {
+      code->set_marked_for_deoptimization(true);
+    }
+    DependentCodes::clear_code(codes, i);
+  }
+  map->set_dependent_codes(heap()->empty_fixed_array());
+}
+
+
+void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) {
+  AssertNoAllocation no_allocation_scope;
+  FixedArray* codes = map->dependent_codes();
+  int number_of_codes = DependentCodes::number_of_codes(codes);
+  if (number_of_codes == 0) return;
+  int new_number_of_codes = 0;
+  for (int i = 0; i < number_of_codes; i++) {
+    Code* code = DependentCodes::code(codes, i);
+    if (IsMarked(code) && !code->marked_for_deoptimization()) {
+      // Keep this entry: compact it towards the front past any dropped
+      // entries, and re-record the slot so the pointer gets updated if the
+      // code object moves during compaction.
+      if (new_number_of_codes != i) {
+        DependentCodes::set_code(codes, new_number_of_codes, code);
+      }
+      Object** slot = DependentCodes::code_slot(codes, new_number_of_codes);
+      RecordSlot(slot, slot, code);
+      new_number_of_codes++;
+    }
+  }
+  for (int i = new_number_of_codes; i < number_of_codes; i++) {
+    DependentCodes::clear_code(codes, i);
+  }
+  DependentCodes::set_number_of_codes(codes, new_number_of_codes);
+}
+
+
 void MarkCompactCollector::ProcessWeakMaps() {
   Object* weak_map_obj = encountered_weak_maps();
   while (weak_map_obj != Smi::FromInt(0)) {
     ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj)));
     JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
     ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
     Object** anchor = reinterpret_cast<Object**>(table->address());
     for (int i = 0; i < table->Capacity(); i++) {
       if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
         Object** key_slot =
(...skipping 1579 matching lines...)
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }


 } }  // namespace v8::internal
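
A note on the new Finish() hook seen above: Deoptimizer::DeoptimizeAllFunctionsWith(&filter) walks the optimized functions and asks the filter whether to take each one, so only functions whose Code was flagged during ClearNonLiveTransitions() get deoptimized. Below is a self-contained toy of that filter pattern; every name in it is invented for illustration and none of it is V8 API.

// Toy model of the filter-driven deoptimization pass added to Finish()
// (illustration only; these types are invented and not the V8 API).
#include <cstdio>
#include <vector>

struct ToyFunction {
  const char* name;
  bool code_marked_for_deoptimization;
};

// Mirrors the shape of OptimizedFunctionFilter::TakeFunction.
class ToyFilter {
 public:
  virtual ~ToyFilter() {}
  virtual bool TakeFunction(ToyFunction* function) = 0;
};

class DeoptimizeMarkedCodeToyFilter : public ToyFilter {
 public:
  virtual bool TakeFunction(ToyFunction* function) {
    return function->code_marked_for_deoptimization;
  }
};

// Mirrors Deoptimizer::DeoptimizeAllFunctionsWith: visit every optimized
// function and deoptimize the ones the filter selects.
void DeoptimizeAllFunctionsWith(std::vector<ToyFunction>* functions,
                                ToyFilter* filter) {
  for (ToyFunction& function : *functions) {
    if (filter->TakeFunction(&function)) {
      std::printf("deoptimizing %s\n", function.name);
    }
  }
}

int main() {
  std::vector<ToyFunction> functions = {{"f", true}, {"g", false}};
  DeoptimizeMarkedCodeToyFilter filter;
  DeoptimizeAllFunctionsWith(&functions, &filter);  // prints: deoptimizing f
  return 0;
}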
