Chromium Code Reviews

Unified Diff: src/heap.cc

Issue 10091027: Process weak references between optimized JSFunctions on scavenges. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: fix incremental marking and slot recording (created 8 years, 8 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1106 matching lines...)
 
   while (head_start != head_end) {
     int size = static_cast<int>(*(head_start++));
     HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++));
     emergency_stack_->Add(Entry(obj, size));
   }
   rear_ = head_end;
 }
 
 
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
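The new class implements the WeakObjectRetainer interface that Heap::ProcessWeakReferences (further down in this file) consumes. Below is a minimal sketch of how a retainer drives the pruning of a weak list during a scavenge. Element, next(), and set_next() are hypothetical stand-ins for JSFunction/Context and their real link accessors, and the loop is simplified from ProcessFunctionWeakReferences later in this diff:

  // Sketch only: how RetainAs() classifies each weakly held list element.
  //   not in from-space             -> keep the object as-is
  //   in from-space and forwarded   -> survived the copy; use new location
  //   in from-space, not forwarded  -> dead; NULL, element gets unlinked
  Object* head = undefined;
  Element* tail = NULL;
  Object* candidate = list_head;
  while (candidate != undefined) {
    Element* current = reinterpret_cast<Element*>(candidate);
    Object* retain = retainer->RetainAs(candidate);
    if (retain != NULL) {
      if (head == undefined) {
        head = retain;           // first survivor becomes the new head
      } else {
        tail->set_next(retain);  // splice the list past dropped elements
      }
      current = reinterpret_cast<Element*>(retain);
      tail = current;
    }
    candidate = current->next();  // follow the possibly relocated link
  }
  if (tail != NULL) tail->set_next(undefined);  // terminate the pruned list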
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
 #endif
 
   gc_state_ = SCAVENGE;
 
   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));
 
(...skipping 78 matching lines...)
       &UpdateNewSpaceReferenceInExternalStringTableEntry);
 
   promotion_queue_.Destroy();
 
   LiveObjectList::UpdateReferencesForScavengeGC();
   if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
 
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());
 
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
 
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
(...skipping 66 matching lines...)
     Object** end = start + external_string_table_.old_space_strings_.length();
     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
   }
 
   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 }
 
 
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
   Object* candidate = function;
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
       if (head == undefined) {
         // First element in the list.
         head = retain;
       } else {
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_next_function_link(retain);
+        if (record_slots) {
+          Object** next_function =
+              HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+          heap->mark_compact_collector()->RecordSlot(
+              next_function, next_function, retain);
+        }
       }
       // Retained function is new tail.
       candidate_function = reinterpret_cast<JSFunction*>(retain);
       tail = candidate_function;
 
       ASSERT(retain->IsUndefined() || retain->IsJSFunction());
 
       if (retain == undefined) break;
     }
 
     // Move to next element in the list.
     candidate = candidate_function->next_function_link();
   }
 
   // Terminate the list if there are one or more elements.
   if (tail != NULL) {
     tail->set_next_function_link(undefined);
   }
 
   return head;
 }
 
 
 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* undefined = undefined_value();
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete the mark-compact cycle.
+  // Note that the write barrier has no effect if we are already in the
+  // middle of a compacting mark-sweep cycle and we have to record slots
+  // manually.
Michael Starzinger 2012/04/17 09:53:18 Missing "in" and s/not/no/
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
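The flag computed above gates manual slot recording: per the comment, the write barrier is inert once a compacting mark-sweep cycle is under way, so a pointer written into an object's field at this point must be handed to the collector explicitly or it will not be rewritten when its target moves. A hypothetical helper condensing the pattern that the record_slots blocks below repeat (WriteAndRecord is not a V8 function; RecordSlot is the collector call used in this patch):

  // Sketch: raw field write followed by manual slot recording.
  static void WriteAndRecord(Heap* heap, HeapObject* holder, int offset,
                             Object* value, bool record_slots) {
    Object** slot = HeapObject::RawField(holder, offset);
    *slot = value;  // raw write: no write barrier takes effect here
    if (record_slots) {
      // Register the slot so a compacting collection rewrites it when
      // `value` is relocated.
      heap->mark_compact_collector()->RecordSlot(slot, slot, value);
    }
  }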
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
     Object* retain = retainer->RetainAs(candidate);
     if (retain != NULL) {
       if (head == undefined) {
         // First element in the list.
         head = retain;
       } else {
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_unchecked(this,
                             Context::NEXT_CONTEXT_LINK,
                             retain,
                             UPDATE_WRITE_BARRIER);
+
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
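A note on the offset computation above: Context is backed by a FixedArray, and for a FixedArray the size of an array holding `index` elements equals the byte offset of element `index`, so SizeFor doubles as a slot-offset helper:

  // FixedArray::SizeFor(index) == kHeaderSize + index * kPointerSize
  //                            == byte offset of element `index`
  // Hence RawField(tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK))
  // is the raw address of the NEXT_CONTEXT_LINK slot written just above.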
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
       tail = candidate_context;
 
       if (retain == undefined) break;
 
       // Process the weak list of optimized functions for the context.
       Object* function_list_head =
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }
 
     // Move to next element in the list.
     candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
   }
 
   // Terminate the list if there are one or more elements.
   if (tail != NULL) {
     tail->set_unchecked(this,
                         Context::NEXT_CONTEXT_LINK,
(...skipping 238 matching lines...)
       if (maybe_result->ToObject(&result)) {
         HeapObject* target = HeapObject::cast(result);
 
         // Order is important: slot might be inside of the target if target
         // was allocated over a dead object and slot comes from the store
         // buffer.
         *slot = target;
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          heap->promotion_queue()->insert(target, object_size);
+          if (map->instance_type() == JS_FUNCTION_TYPE) {
+            heap->promotion_queue()->insert(
+                target, JSFunction::kNonWeakFieldsEndOffset);
+          } else {
+            heap->promotion_queue()->insert(target, object_size);
+          }
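The JS_FUNCTION_TYPE special case queues a promoted JSFunction with only its leading non-weak fields as the size, so the weak next_function_link (which sits past kNonWeakFieldsEndOffset) stays out of the strong-pointer scan and is left to ProcessWeakReferences instead. A sketch of the consuming side, assuming the promotion queue's (object, size) pairs bound the pointer iteration; the drain loop is illustrative rather than copied from the sources, and scavenge_visitor stands for the scavenger's pointer visitor:

  // Hypothetical drain loop over the promotion queue.
  while (!heap->promotion_queue()->is_empty()) {
    HeapObject* target;
    int size;
    heap->promotion_queue()->remove(&target, &size);
    // Only the first `size` bytes are visited as pointers, so a JSFunction
    // queued with kNonWeakFieldsEndOffset has its weak link skipped here.
    target->IterateBody(target->map()->instance_type(), size,
                        &scavenge_visitor);
  }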
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
         return;
       }
     }
     MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
     heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
     Object* result = allocation->ToObjectUnchecked();
     HeapObject* target = HeapObject::cast(result);
(...skipping 5349 matching lines...)
   } else {
     p ^= 0x1d1ed & (Page::kPageSize - 1);  // I died.
   }
   remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
       reinterpret_cast<Address>(p);
   remembered_unmapped_pages_index_++;
   remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
 }
 
 } }  // namespace v8::internal