OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 403 matching lines...)
414 if (FLAG_verify_native_context_separation) { | 414 if (FLAG_verify_native_context_separation) { |
415 VerifyNativeContextSeparation(heap_); | 415 VerifyNativeContextSeparation(heap_); |
416 } | 416 } |
417 #endif | 417 #endif |
418 | 418 |
419 #ifdef VERIFY_HEAP | 419 #ifdef VERIFY_HEAP |
420 if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code && | 420 if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_optimized_code && |
421 heap()->weak_embedded_maps_verification_enabled()) { | 421 heap()->weak_embedded_maps_verification_enabled()) { |
422 VerifyWeakEmbeddedMapsInOptimizedCode(); | 422 VerifyWeakEmbeddedMapsInOptimizedCode(); |
423 } | 423 } |
| 424 if (FLAG_collect_maps && FLAG_omit_prototype_checks_for_leaf_maps) { |
| 425 VerifyOmittedPrototypeChecks(); |
| 426 } |
424 #endif | 427 #endif |
425 | 428 |
426 Finish(); | 429 Finish(); |
427 | 430 |
428 if (marking_parity_ == EVEN_MARKING_PARITY) { | 431 if (marking_parity_ == EVEN_MARKING_PARITY) { |
429 marking_parity_ = ODD_MARKING_PARITY; | 432 marking_parity_ = ODD_MARKING_PARITY; |
430 } else { | 433 } else { |
431 ASSERT(marking_parity_ == ODD_MARKING_PARITY); | 434 ASSERT(marking_parity_ == ODD_MARKING_PARITY); |
432 marking_parity_ = EVEN_MARKING_PARITY; | 435 marking_parity_ = EVEN_MARKING_PARITY; |
433 } | 436 } |
(...skipping 46 matching lines...)
480 HeapObjectIterator code_iterator(heap()->code_space()); | 483 HeapObjectIterator code_iterator(heap()->code_space()); |
481 for (HeapObject* obj = code_iterator.Next(); | 484 for (HeapObject* obj = code_iterator.Next(); |
482 obj != NULL; | 485 obj != NULL; |
483 obj = code_iterator.Next()) { | 486 obj = code_iterator.Next()) { |
484 Code* code = Code::cast(obj); | 487 Code* code = Code::cast(obj); |
485 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; | 488 if (code->kind() != Code::OPTIMIZED_FUNCTION) continue; |
486 if (code->marked_for_deoptimization()) continue; | 489 if (code->marked_for_deoptimization()) continue; |
487 code->VerifyEmbeddedMapsDependency(); | 490 code->VerifyEmbeddedMapsDependency(); |
488 } | 491 } |
489 } | 492 } |
| 493 |
| 494 |
| 495 void MarkCompactCollector::VerifyOmittedPrototypeChecks() { |
| 496 HeapObjectIterator iterator(heap()->map_space()); |
| 497 for (HeapObject* obj = iterator.Next(); |
| 498 obj != NULL; |
| 499 obj = iterator.Next()) { |
| 500 Map* map = Map::cast(obj); |
| 501 map->VerifyOmittedPrototypeChecks(); |
| 502 } |
| 503 } |
490 #endif // VERIFY_HEAP | 504 #endif // VERIFY_HEAP |
491 | 505 |
492 | 506 |
493 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { | 507 static void ClearMarkbitsInPagedSpace(PagedSpace* space) { |
494 PageIterator it(space); | 508 PageIterator it(space); |
495 | 509 |
496 while (it.has_next()) { | 510 while (it.has_next()) { |
497 Bitmap::Clear(it.next()); | 511 Bitmap::Clear(it.next()); |
498 } | 512 } |
499 } | 513 } |
(...skipping 1785 matching lines...)
2285 // This map is used for inobject slack tracking and has been detached | 2299 // This map is used for inobject slack tracking and has been detached |
2286 // from SharedFunctionInfo during the mark phase. | 2300 // from SharedFunctionInfo during the mark phase. |
2287 // Since it survived the GC, reattach it now. | 2301 // Since it survived the GC, reattach it now. |
2288 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); | 2302 map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map); |
2289 } | 2303 } |
2290 | 2304 |
2291 ClearNonLivePrototypeTransitions(map); | 2305 ClearNonLivePrototypeTransitions(map); |
2292 ClearNonLiveMapTransitions(map, map_mark); | 2306 ClearNonLiveMapTransitions(map, map_mark); |
2293 | 2307 |
2294 if (map_mark.Get()) { | 2308 if (map_mark.Get()) { |
2295 ClearNonLiveDependentCodes(map); | 2309 ClearNonLiveDependentCode(map); |
2296 } else { | 2310 } else { |
2297 ClearAndDeoptimizeDependentCodes(map); | 2311 ClearAndDeoptimizeDependentCode(map); |
2298 } | 2312 } |
2299 } | 2313 } |
2300 } | 2314 } |
2301 | 2315 |
2302 | 2316 |
2303 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { | 2317 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) { |
2304 int number_of_transitions = map->NumberOfProtoTransitions(); | 2318 int number_of_transitions = map->NumberOfProtoTransitions(); |
2305 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); | 2319 FixedArray* prototype_transitions = map->GetPrototypeTransitions(); |
2306 | 2320 |
2307 int new_number_of_transitions = 0; | 2321 int new_number_of_transitions = 0; |
(...skipping 49 matching lines...)
2357 // Follow back pointer, check whether we are dealing with a map transition | 2371 // Follow back pointer, check whether we are dealing with a map transition |
2358 // from a live map to a dead path and in case clear transitions of parent. | 2372 // from a live map to a dead path and in case clear transitions of parent. |
2359 bool current_is_alive = map_mark.Get(); | 2373 bool current_is_alive = map_mark.Get(); |
2360 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); | 2374 bool parent_is_alive = Marking::MarkBitFrom(parent).Get(); |
2361 if (!current_is_alive && parent_is_alive) { | 2375 if (!current_is_alive && parent_is_alive) { |
2362 parent->ClearNonLiveTransitions(heap()); | 2376 parent->ClearNonLiveTransitions(heap()); |
2363 } | 2377 } |
2364 } | 2378 } |
2365 | 2379 |
2366 | 2380 |
2367 void MarkCompactCollector::ClearAndDeoptimizeDependentCodes(Map* map) { | 2381 void MarkCompactCollector::ClearAndDeoptimizeDependentCode(Map* map) { |
2368 AssertNoAllocation no_allocation_scope; | 2382 AssertNoAllocation no_allocation_scope; |
2369 DependentCodes* codes = map->dependent_codes(); | 2383 DependentCode* entries = map->dependent_code(); |
2370 int number_of_codes = codes->number_of_codes(); | 2384 DependentCode::GroupStartIndexes starts(entries); |
2371 if (number_of_codes == 0) return; | 2385 int number_of_entries = starts.number_of_entries(); |
2372 for (int i = 0; i < number_of_codes; i++) { | 2386 if (number_of_entries == 0) return; |
2373 Code* code = codes->code_at(i); | 2387 for (int i = 0; i < number_of_entries; i++) { |
| 2388 Code* code = entries->code_at(i); |
2374 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2389 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
2375 code->set_marked_for_deoptimization(true); | 2390 code->set_marked_for_deoptimization(true); |
2376 } | 2391 } |
2377 codes->clear_code_at(i); | 2392 entries->clear_code_at(i); |
2378 } | 2393 } |
2379 map->set_dependent_codes(DependentCodes::cast(heap()->empty_fixed_array())); | 2394 map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array())); |
2380 } | 2395 } |
2381 | 2396 |
2382 | 2397 |
2383 void MarkCompactCollector::ClearNonLiveDependentCodes(Map* map) { | 2398 void MarkCompactCollector::ClearNonLiveDependentCode(Map* map) { |
2384 AssertNoAllocation no_allocation_scope; | 2399 AssertNoAllocation no_allocation_scope; |
2385 DependentCodes* codes = map->dependent_codes(); | 2400 DependentCode* entries = map->dependent_code(); |
2386 int number_of_codes = codes->number_of_codes(); | 2401 DependentCode::GroupStartIndexes starts(entries); |
2387 if (number_of_codes == 0) return; | 2402 int number_of_entries = starts.number_of_entries(); |
2388 int new_number_of_codes = 0; | 2403 if (number_of_entries == 0) return; |
2389 for (int i = 0; i < number_of_codes; i++) { | 2404 int new_number_of_entries = 0; |
2390 Code* code = codes->code_at(i); | 2405 // Go through all groups, remove dead codes and compact. |
2391 if (IsMarked(code) && !code->marked_for_deoptimization()) { | 2406 for (int g = 0; g < DependentCode::kGroupCount; g++) { |
2392 if (new_number_of_codes != i) { | 2407 int group_number_of_entries = 0; |
2393 codes->set_code_at(new_number_of_codes, code); | 2408 for (int i = starts.at(g); i < starts.at(g + 1); i++) { |
| 2409 Code* code = entries->code_at(i); |
| 2410 if (IsMarked(code) && !code->marked_for_deoptimization()) { |
| 2411 if (new_number_of_entries + group_number_of_entries != i) { |
| 2412 entries->set_code_at(new_number_of_entries + |
| 2413 group_number_of_entries, code); |
| 2414 } |
| 2415 Object** slot = entries->code_slot_at(new_number_of_entries + |
| 2416 group_number_of_entries); |
| 2417 RecordSlot(slot, slot, code); |
| 2418 group_number_of_entries++; |
2394 } | 2419 } |
2395 Object** slot = codes->code_slot_at(new_number_of_codes); | |
2396 RecordSlot(slot, slot, code); | |
2397 new_number_of_codes++; | |
2398 } | 2420 } |
| 2421 entries->set_number_of_entries( |
| 2422 static_cast<DependentCode::DependencyGroup>(g), |
| 2423 group_number_of_entries); |
| 2424 new_number_of_entries += group_number_of_entries; |
2399 } | 2425 } |
2400 for (int i = new_number_of_codes; i < number_of_codes; i++) { | 2426 for (int i = new_number_of_entries; i < number_of_entries; i++) { |
2401 codes->clear_code_at(i); | 2427 entries->clear_code_at(i); |
2402 } | 2428 } |
2403 codes->set_number_of_codes(new_number_of_codes); | |
2404 } | 2429 } |
2405 | 2430 |
2406 | 2431 |
2407 void MarkCompactCollector::ProcessWeakMaps() { | 2432 void MarkCompactCollector::ProcessWeakMaps() { |
2408 Object* weak_map_obj = encountered_weak_maps(); | 2433 Object* weak_map_obj = encountered_weak_maps(); |
2409 while (weak_map_obj != Smi::FromInt(0)) { | 2434 while (weak_map_obj != Smi::FromInt(0)) { |
2410 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); | 2435 ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj))); |
2411 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); | 2436 JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj); |
2412 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); | 2437 ObjectHashTable* table = ObjectHashTable::cast(weak_map->table()); |
2413 Object** anchor = reinterpret_cast<Object**>(table->address()); | 2438 Object** anchor = reinterpret_cast<Object**>(table->address()); |
(...skipping 1656 matching lines...)
4070 while (buffer != NULL) { | 4095 while (buffer != NULL) { |
4071 SlotsBuffer* next_buffer = buffer->next(); | 4096 SlotsBuffer* next_buffer = buffer->next(); |
4072 DeallocateBuffer(buffer); | 4097 DeallocateBuffer(buffer); |
4073 buffer = next_buffer; | 4098 buffer = next_buffer; |
4074 } | 4099 } |
4075 *buffer_address = NULL; | 4100 *buffer_address = NULL; |
4076 } | 4101 } |
4077 | 4102 |
4078 | 4103 |
4079 } } // namespace v8::internal | 4104 } } // namespace v8::internal |
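
Note on the main structural change in this diff: the old DependentCodes list stored one flat run of code objects with a single count, while the new DependentCode array is partitioned into DependentCode::kGroupCount contiguous dependency groups, with DependentCode::GroupStartIndexes supplying each group's start index. ClearNonLiveDependentCode therefore compacts survivors group by group and updates each group's entry count rather than one global count. The standalone sketch below shows that compaction pattern on a plain int array; the names (CompactGroups, is_live, kGroupCount = 3) and the liveness predicate are illustrative assumptions, not the V8 API, and the slot recording (RecordSlot) is omitted.

#include <array>
#include <cstdio>
#include <vector>

constexpr int kGroupCount = 3;  // Stand-in for DependentCode::kGroupCount.

// Compacts a flat array partitioned into kGroupCount contiguous groups,
// dropping entries for which is_live() is false. starts[g] is the first
// index of group g; starts[kGroupCount] is the total entry count. Returns
// the new total and rewrites starts[] from the per-group survivor counts.
int CompactGroups(std::vector<int>& entries,
                  std::array<int, kGroupCount + 1>& starts,
                  bool (*is_live)(int)) {
  int new_count = 0;  // Survivors kept so far, across all groups.
  std::array<int, kGroupCount> group_counts{};
  for (int g = 0; g < kGroupCount; g++) {
    int kept_in_group = 0;
    for (int i = starts[g]; i < starts[g + 1]; i++) {
      if (is_live(entries[i])) {
        // Slide the survivor left into its compacted slot. The write index
        // never exceeds i, so entries not yet read are never clobbered.
        entries[new_count + kept_in_group] = entries[i];
        kept_in_group++;
      }
    }
    group_counts[g] = kept_in_group;
    new_count += kept_in_group;
  }
  // Clear the now-unused tail, as clear_code_at() does in the original.
  for (int i = new_count; i < starts[kGroupCount]; i++) entries[i] = 0;
  // Rebuild the group start indexes from the per-group survivor counts,
  // mirroring the per-group set_number_of_entries() calls.
  starts[0] = 0;
  for (int g = 0; g < kGroupCount; g++) {
    starts[g + 1] = starts[g] + group_counts[g];
  }
  return new_count;
}

int main() {
  // Groups laid out back to back: {1, 2} {3, 4, 5} {6}; the toy predicate
  // treats even values as "marked" (live).
  std::vector<int> entries = {1, 2, 3, 4, 5, 6};
  std::array<int, kGroupCount + 1> starts = {0, 2, 5, 6};
  int kept = CompactGroups(entries, starts, [](int v) { return v % 2 == 0; });
  std::printf("kept %d\n", kept);  // kept 3 -> entries now 2, 4, 6, 0, 0, 0
  return 0;
}

The single-pass, in-place slide keeps the array grouped without any temporary allocation, which matches the AssertNoAllocation scope the collector holds while running this code.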