Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 10832342: Rename "global context" to "native context", (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 8 years, 4 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 157 matching lines...)
168 intptr_t max_virtual = OS::MaxVirtualMemory(); 168 intptr_t max_virtual = OS::MaxVirtualMemory();
169 169
170 if (max_virtual > 0) { 170 if (max_virtual > 0) {
171 if (code_range_size_ > 0) { 171 if (code_range_size_ > 0) {
172 // Reserve no more than 1/8 of the memory for the code range. 172 // Reserve no more than 1/8 of the memory for the code range.
173 code_range_size_ = Min(code_range_size_, max_virtual >> 3); 173 code_range_size_ = Min(code_range_size_, max_virtual >> 3);
174 } 174 }
175 } 175 }
176 176
177 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength); 177 memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
178 global_contexts_list_ = NULL; 178 native_contexts_list_ = NULL;
179 mark_compact_collector_.heap_ = this; 179 mark_compact_collector_.heap_ = this;
180 external_string_table_.heap_ = this; 180 external_string_table_.heap_ = this;
181 // Put a dummy entry in the remembered pages so we can find the list in the 181 // Put a dummy entry in the remembered pages so we can find the list in the
182 // minidump even if there are no real unmapped pages. 182 // minidump even if there are no real unmapped pages.
183 RememberUnmappedPage(NULL, false); 183 RememberUnmappedPage(NULL, false);
184 184
185 ClearObjectStats(true); 185 ClearObjectStats(true);
186 } 186 }
187 187
188 188
(...skipping 559 matching lines...)
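
Aside: the constructor hunk above clamps the reserved code range to at most one eighth of the addressable virtual memory (the `Min(code_range_size_, max_virtual >> 3)` line). A rough stand-alone C++ sketch of that clamp, with made-up byte values purely for illustration:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    int main() {
      // Hypothetical numbers: 512 MB requested code range, 2 GB of virtual memory.
      std::int64_t code_range_size = 512LL << 20;
      std::int64_t max_virtual = 2LL << 30;
      if (max_virtual > 0 && code_range_size > 0) {
        // Reserve no more than 1/8 of the memory for the code range (>> 3 == / 8).
        code_range_size = std::min(code_range_size, max_virtual >> 3);
      }
      std::printf("%lld MB\n", static_cast<long long>(code_range_size >> 20));  // prints: 256 MB
    }
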
748 748
749 // Committing memory to from space failed again. 749 // Committing memory to from space failed again.
750 // Memory is exhausted and we will die. 750 // Memory is exhausted and we will die.
751 V8::FatalProcessOutOfMemory("Committing semi space failed."); 751 V8::FatalProcessOutOfMemory("Committing semi space failed.");
752 } 752 }
753 753
754 754
755 void Heap::ClearJSFunctionResultCaches() { 755 void Heap::ClearJSFunctionResultCaches() {
756 if (isolate_->bootstrapper()->IsActive()) return; 756 if (isolate_->bootstrapper()->IsActive()) return;
757 757
758 Object* context = global_contexts_list_; 758 Object* context = native_contexts_list_;
759 while (!context->IsUndefined()) { 759 while (!context->IsUndefined()) {
760 // Get the caches for this context. GC can happen when the context 760 // Get the caches for this context. GC can happen when the context
761 // is not fully initialized, so the caches can be undefined. 761 // is not fully initialized, so the caches can be undefined.
762 Object* caches_or_undefined = 762 Object* caches_or_undefined =
763 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); 763 Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX);
764 if (!caches_or_undefined->IsUndefined()) { 764 if (!caches_or_undefined->IsUndefined()) {
765 FixedArray* caches = FixedArray::cast(caches_or_undefined); 765 FixedArray* caches = FixedArray::cast(caches_or_undefined);
766 // Clear the caches: 766 // Clear the caches:
767 int length = caches->length(); 767 int length = caches->length();
768 for (int i = 0; i < length; i++) { 768 for (int i = 0; i < length; i++) {
769 JSFunctionResultCache::cast(caches->get(i))->Clear(); 769 JSFunctionResultCache::cast(caches->get(i))->Clear();
770 } 770 }
771 } 771 }
772 // Get the next context: 772 // Get the next context:
773 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 773 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
774 } 774 }
775 } 775 }
776 776
777 777
778 778
779 void Heap::ClearNormalizedMapCaches() { 779 void Heap::ClearNormalizedMapCaches() {
780 if (isolate_->bootstrapper()->IsActive() && 780 if (isolate_->bootstrapper()->IsActive() &&
781 !incremental_marking()->IsMarking()) { 781 !incremental_marking()->IsMarking()) {
782 return; 782 return;
783 } 783 }
784 784
785 Object* context = global_contexts_list_; 785 Object* context = native_contexts_list_;
786 while (!context->IsUndefined()) { 786 while (!context->IsUndefined()) {
787 // GC can happen when the context is not fully initialized, 787 // GC can happen when the context is not fully initialized,
788 // so the cache can be undefined. 788 // so the cache can be undefined.
789 Object* cache = 789 Object* cache =
790 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); 790 Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
791 if (!cache->IsUndefined()) { 791 if (!cache->IsUndefined()) {
792 NormalizedMapCache::cast(cache)->Clear(); 792 NormalizedMapCache::cast(cache)->Clear();
793 } 793 }
794 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); 794 context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
795 } 795 }
(...skipping 492 matching lines...)
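
Aside: both ClearJSFunctionResultCaches and ClearNormalizedMapCaches above walk the renamed native_contexts_list_, an intrusive singly linked list of contexts threaded through the NEXT_CONTEXT_LINK slot and terminated by the undefined sentinel. A minimal stand-alone sketch of that traversal pattern (hypothetical types, not the real V8 classes; nullptr stands in for the undefined sentinel):

    #include <cstdio>

    // Hypothetical stand-in for a context on the list; the real list is threaded
    // through Context::NEXT_CONTEXT_LINK and ends at the undefined sentinel.
    struct Context {
      Context* next_context_link;  // plays the role of NEXT_CONTEXT_LINK
      int cache_size;              // plays the role of a per-context cache
    };

    // Walk the intrusive list and clear each context's cache, the same shape as
    // Heap::ClearJSFunctionResultCaches / Heap::ClearNormalizedMapCaches.
    void ClearPerContextCaches(Context* head) {
      for (Context* c = head; c != nullptr; c = c->next_context_link) {
        c->cache_size = 0;  // "Clear the caches"
      }
    }

    int main() {
      Context c2{nullptr, 7};
      Context c1{&c2, 3};
      ClearPerContextCaches(&c1);
      std::printf("%d %d\n", c1.cache_size, c2.cache_size);  // prints: 0 0
    }
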
1288 for (HeapObject* heap_object = cell_iterator.Next(); 1288 for (HeapObject* heap_object = cell_iterator.Next();
1289 heap_object != NULL; 1289 heap_object != NULL;
1290 heap_object = cell_iterator.Next()) { 1290 heap_object = cell_iterator.Next()) {
1291 if (heap_object->IsJSGlobalPropertyCell()) { 1291 if (heap_object->IsJSGlobalPropertyCell()) {
1292 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object); 1292 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(heap_object);
1293 Address value_address = cell->ValueAddress(); 1293 Address value_address = cell->ValueAddress();
1294 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); 1294 scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1295 } 1295 }
1296 } 1296 }
1297 1297
1298 // Scavenge objects reachable from the global contexts list directly. 1298 // Scavenge objects reachable from the native contexts list directly.
1299 scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_)); 1299 scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
1300 1300
1301 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1301 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1302 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles( 1302 isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
1303 &IsUnscavengedHeapObject); 1303 &IsUnscavengedHeapObject);
1304 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots( 1304 isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
1305 &scavenge_visitor); 1305 &scavenge_visitor);
1306 new_space_front = DoScavenge(&scavenge_visitor, new_space_front); 1306 new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1307 1307
1308 UpdateNewSpaceReferencesInExternalStringTable( 1308 UpdateNewSpaceReferencesInExternalStringTable(
1309 &UpdateNewSpaceReferenceInExternalStringTableEntry); 1309 &UpdateNewSpaceReferenceInExternalStringTableEntry);
(...skipping 139 matching lines...)
1449 } 1449 }
1450 1450
1451 return head; 1451 return head;
1452 } 1452 }
1453 1453
1454 1454
1455 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) { 1455 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1456 Object* undefined = undefined_value(); 1456 Object* undefined = undefined_value();
1457 Object* head = undefined; 1457 Object* head = undefined;
1458 Context* tail = NULL; 1458 Context* tail = NULL;
1459 Object* candidate = global_contexts_list_; 1459 Object* candidate = native_contexts_list_;
1460 1460
1461 // We don't record weak slots during marking or scavenges. 1461 // We don't record weak slots during marking or scavenges.
1462 // Instead we do it once when we complete the mark-compact cycle. 1462 // Instead we do it once when we complete the mark-compact cycle.
1463 // Note that the write barrier has no effect if we are already in the middle 1463 // Note that the write barrier has no effect if we are already in the middle
1464 // of a compacting mark-sweep cycle and we have to record slots manually. 1464 // of a compacting mark-sweep cycle and we have to record slots manually.
1465 bool record_slots = 1465 bool record_slots =
1466 gc_state() == MARK_COMPACT && 1466 gc_state() == MARK_COMPACT &&
1467 mark_compact_collector()->is_compacting(); 1467 mark_compact_collector()->is_compacting();
1468 1468
1469 while (candidate != undefined) { 1469 while (candidate != undefined) {
(...skipping 52 matching lines...)
1522 1522
1523 // Terminate the list if there are one or more elements. 1523 // Terminate the list if there are one or more elements.
1524 if (tail != NULL) { 1524 if (tail != NULL) {
1525 tail->set_unchecked(this, 1525 tail->set_unchecked(this,
1526 Context::NEXT_CONTEXT_LINK, 1526 Context::NEXT_CONTEXT_LINK,
1527 Heap::undefined_value(), 1527 Heap::undefined_value(),
1528 UPDATE_WRITE_BARRIER); 1528 UPDATE_WRITE_BARRIER);
1529 } 1529 }
1530 1530
1531 // Update the head of the list of contexts. 1531 // Update the head of the list of contexts.
1532 global_contexts_list_ = head; 1532 native_contexts_list_ = head;
1533 } 1533 }
1534 1534
1535 1535
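
Aside: ProcessWeakReferences above rebuilds the same intrusive list, keeping only the contexts the WeakObjectRetainer retains (the retention call itself sits in the elided lines), splicing survivors together with head/tail bookkeeping, and finally storing the new head back into native_contexts_list_. A hedged stand-alone sketch of that filter-and-relink shape, with plain pointers instead of tagged Object* and a predicate in place of WeakObjectRetainer:

    struct Context {
      Context* next_context_link = nullptr;
    };

    // Rebuild an intrusive singly linked list, dropping nodes the retainer
    // rejects; mirrors the head/tail bookkeeping in Heap::ProcessWeakReferences.
    template <typename Retainer>
    Context* ProcessWeakList(Context* head, Retainer retain) {
      Context* new_head = nullptr;
      Context* tail = nullptr;
      for (Context* candidate = head; candidate != nullptr;) {
        Context* next = candidate->next_context_link;
        if (retain(candidate)) {
          if (tail == nullptr) {
            new_head = candidate;                  // first retained element becomes the head
          } else {
            tail->next_context_link = candidate;   // splice after the previous survivor
          }
          tail = candidate;
        }
        candidate = next;
      }
      if (tail != nullptr) tail->next_context_link = nullptr;  // terminate the list
      return new_head;  // caller stores this back, like native_contexts_list_ = head
    }

In the sketch retention is a caller-supplied predicate; in the patched code the WeakObjectRetainer decides, and the write-barrier/slot-recording details from the surrounding hunk are omitted.
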
1536 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) { 1536 void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
1537 AssertNoAllocation no_allocation; 1537 AssertNoAllocation no_allocation;
1538 1538
1539 class VisitorAdapter : public ObjectVisitor { 1539 class VisitorAdapter : public ObjectVisitor {
1540 public: 1540 public:
1541 explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor) 1541 explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor)
1542 : visitor_(visitor) {} 1542 : visitor_(visitor) {}
(...skipping 105 matching lines...)
1648 class ScavengingVisitor : public StaticVisitorBase { 1648 class ScavengingVisitor : public StaticVisitorBase {
1649 public: 1649 public:
1650 static void Initialize() { 1650 static void Initialize() {
1651 table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString); 1651 table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
1652 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString); 1652 table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
1653 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate); 1653 table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
1654 table_.Register(kVisitByteArray, &EvacuateByteArray); 1654 table_.Register(kVisitByteArray, &EvacuateByteArray);
1655 table_.Register(kVisitFixedArray, &EvacuateFixedArray); 1655 table_.Register(kVisitFixedArray, &EvacuateFixedArray);
1656 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray); 1656 table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
1657 1657
1658 table_.Register(kVisitGlobalContext, 1658 table_.Register(kVisitNativeContext,
1659 &ObjectEvacuationStrategy<POINTER_OBJECT>:: 1659 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1660 template VisitSpecialized<Context::kSize>); 1660 template VisitSpecialized<Context::kSize>);
1661 1661
1662 table_.Register(kVisitConsString, 1662 table_.Register(kVisitConsString,
1663 &ObjectEvacuationStrategy<POINTER_OBJECT>:: 1663 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1664 template VisitSpecialized<ConsString::kSize>); 1664 template VisitSpecialized<ConsString::kSize>);
1665 1665
1666 table_.Register(kVisitSlicedString, 1666 table_.Register(kVisitSlicedString,
1667 &ObjectEvacuationStrategy<POINTER_OBJECT>:: 1667 &ObjectEvacuationStrategy<POINTER_OBJECT>::
1668 template VisitSpecialized<SlicedString::kSize>); 1668 template VisitSpecialized<SlicedString::kSize>);
(...skipping 765 matching lines...)
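
Aside: the ScavengingVisitor::Initialize hunk re-registers the renamed kVisitNativeContext id in a static table that maps visitor ids to evacuation callbacks. A simplified stand-alone sketch of that table-driven dispatch (hypothetical ids and handlers, not the real StaticVisitorBase machinery):

    #include <cstdio>

    // Hypothetical visitor ids; the real enum lives in StaticVisitorBase.
    enum VisitorId {
      kVisitByteArray,
      kVisitFixedArray,
      kVisitNativeContext,
      kVisitorIdCount
    };

    struct Object;  // opaque for the sketch
    using Callback = void (*)(Object* obj);

    // A static callback table keyed by visitor id, in the spirit of table_.Register above.
    struct DispatchTable {
      Callback callbacks[kVisitorIdCount] = {};
      void Register(VisitorId id, Callback cb) { callbacks[id] = cb; }
      void Visit(VisitorId id, Object* obj) { if (callbacks[id]) callbacks[id](obj); }
    };

    static void EvacuateNativeContext(Object*) { std::puts("evacuate native context"); }

    int main() {
      DispatchTable table;
      table.Register(kVisitNativeContext, &EvacuateNativeContext);  // the renamed entry
      table.Visit(kVisitNativeContext, nullptr);
    }
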
2434 { MaybeObject* maybe_obj = 2434 { MaybeObject* maybe_obj =
2435 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 2435 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2436 if (!maybe_obj->ToObject(&obj)) return false; 2436 if (!maybe_obj->ToObject(&obj)) return false;
2437 } 2437 }
2438 set_module_context_map(Map::cast(obj)); 2438 set_module_context_map(Map::cast(obj));
2439 2439
2440 { MaybeObject* maybe_obj = 2440 { MaybeObject* maybe_obj =
2441 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel); 2441 AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2442 if (!maybe_obj->ToObject(&obj)) return false; 2442 if (!maybe_obj->ToObject(&obj)) return false;
2443 } 2443 }
2444 Map* global_context_map = Map::cast(obj); 2444 Map* native_context_map = Map::cast(obj);
2445 global_context_map->set_dictionary_map(true); 2445 native_context_map->set_dictionary_map(true);
2446 global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext); 2446 native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
2447 set_global_context_map(global_context_map); 2447 set_native_context_map(native_context_map);
2448 2448
2449 { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, 2449 { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
2450 SharedFunctionInfo::kAlignedSize); 2450 SharedFunctionInfo::kAlignedSize);
2451 if (!maybe_obj->ToObject(&obj)) return false; 2451 if (!maybe_obj->ToObject(&obj)) return false;
2452 } 2452 }
2453 set_shared_function_info_map(Map::cast(obj)); 2453 set_shared_function_info_map(Map::cast(obj));
2454 2454
2455 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE, 2455 { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE,
2456 JSMessageObject::kSize); 2456 JSMessageObject::kSize);
2457 if (!maybe_obj->ToObject(&obj)) return false; 2457 if (!maybe_obj->ToObject(&obj)) return false;
(...skipping 1258 matching lines...)
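
Aside: the CreateInitialMaps hunk repeats the idiom `{ MaybeObject* maybe_obj = AllocateMap(...); if (!maybe_obj->ToObject(&obj)) return false; }` — allocation can fail, and the failure is reported through the returned MaybeObject rather than by raising an exception, so every caller checks and propagates it. A rough stand-alone sketch of that result-or-failure style using a small hand-rolled type (not V8's real MaybeObject):

    #include <cstdio>

    struct Map { int instance_size; };

    // Hypothetical result type: either a Map* or a failure flag, loosely mirroring
    // how MaybeObject::ToObject() reports allocation failure to the caller.
    struct MaybeMap {
      Map* value;
      bool failed;
      bool To(Map** out) const { if (failed) return false; *out = value; return true; }
    };

    static Map g_storage[8];
    static int g_used = 0;

    MaybeMap AllocateMap(int instance_size) {
      if (g_used == 8) return {nullptr, true};   // out of space: report failure
      g_storage[g_used].instance_size = instance_size;
      return {&g_storage[g_used++], false};
    }

    // Same shape as Heap::CreateInitialMaps: bail out with false on any failure.
    bool CreateInitialMaps() {
      Map* obj;
      { MaybeMap maybe_obj = AllocateMap(16);
        if (!maybe_obj.To(&obj)) return false; }
      { MaybeMap maybe_obj = AllocateMap(24);
        if (!maybe_obj.To(&obj)) return false; }
      return true;
    }

    int main() { std::printf("%d\n", CreateInitialMaps() ? 1 : 0); }
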
3716 function->set_literals_or_bindings(empty_fixed_array()); 3716 function->set_literals_or_bindings(empty_fixed_array());
3717 function->set_next_function_link(undefined_value()); 3717 function->set_next_function_link(undefined_value());
3718 } 3718 }
3719 3719
3720 3720
3721 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) { 3721 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
3722 // Allocate the prototype. Make sure to use the object function 3722 // Allocate the prototype. Make sure to use the object function
3723 // from the function's context, since the function can be from a 3723 // from the function's context, since the function can be from a
3724 // different context. 3724 // different context.
3725 JSFunction* object_function = 3725 JSFunction* object_function =
3726 function->context()->global_context()->object_function(); 3726 function->context()->native_context()->object_function();
3727 3727
3728 // Each function prototype gets a copy of the object function map. 3728 // Each function prototype gets a copy of the object function map.
3729 // This avoids unwanted sharing of maps between prototypes of different 3729 // This avoids unwanted sharing of maps between prototypes of different
3730 // constructors. 3730 // constructors.
3731 Map* new_map; 3731 Map* new_map;
3732 ASSERT(object_function->has_initial_map()); 3732 ASSERT(object_function->has_initial_map());
3733 MaybeObject* maybe_map = object_function->initial_map()->Copy(); 3733 MaybeObject* maybe_map = object_function->initial_map()->Copy();
3734 if (!maybe_map->To(&new_map)) return maybe_map; 3734 if (!maybe_map->To(&new_map)) return maybe_map;
3735 3735
3736 Object* prototype; 3736 Object* prototype;
(...skipping 29 matching lines...)
3766 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { 3766 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
3767 // To get fast allocation and map sharing for arguments objects we 3767 // To get fast allocation and map sharing for arguments objects we
3768 // allocate them based on an arguments boilerplate. 3768 // allocate them based on an arguments boilerplate.
3769 3769
3770 JSObject* boilerplate; 3770 JSObject* boilerplate;
3771 int arguments_object_size; 3771 int arguments_object_size;
3772 bool strict_mode_callee = callee->IsJSFunction() && 3772 bool strict_mode_callee = callee->IsJSFunction() &&
3773 !JSFunction::cast(callee)->shared()->is_classic_mode(); 3773 !JSFunction::cast(callee)->shared()->is_classic_mode();
3774 if (strict_mode_callee) { 3774 if (strict_mode_callee) {
3775 boilerplate = 3775 boilerplate =
3776 isolate()->context()->global_context()-> 3776 isolate()->context()->native_context()->
3777 strict_mode_arguments_boilerplate(); 3777 strict_mode_arguments_boilerplate();
3778 arguments_object_size = kArgumentsObjectSizeStrict; 3778 arguments_object_size = kArgumentsObjectSizeStrict;
3779 } else { 3779 } else {
3780 boilerplate = 3780 boilerplate =
3781 isolate()->context()->global_context()->arguments_boilerplate(); 3781 isolate()->context()->native_context()->arguments_boilerplate();
3782 arguments_object_size = kArgumentsObjectSize; 3782 arguments_object_size = kArgumentsObjectSize;
3783 } 3783 }
3784 3784
3785 // This calls Copy directly rather than using Heap::AllocateRaw so we 3785 // This calls Copy directly rather than using Heap::AllocateRaw so we
3786 // duplicate the check here. 3786 // duplicate the check here.
3787 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 3787 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
3788 3788
3789 // Check that the size of the boilerplate matches our 3789 // Check that the size of the boilerplate matches our
3790 // expectations. The ArgumentsAccessStub::GenerateNewObject relies 3790 // expectations. The ArgumentsAccessStub::GenerateNewObject relies
3791 // on the size being a known constant. 3791 // on the size being a known constant.
(...skipping 510 matching lines...)
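
Aside: AllocateArgumentsObject above picks between two boilerplate objects now fetched from the native context — strict_mode_arguments_boilerplate for strict-mode callees, arguments_boilerplate otherwise — together with the matching size constant. A simplified stand-alone sketch of that selection (hypothetical slot counts; a plain bool replaces the SharedFunctionInfo classic-mode check):

    #include <cstdio>

    struct Boilerplate { const char* name; };

    // Stand-ins for the two boilerplates cached on the native context.
    struct NativeContext {
      Boilerplate strict_mode_arguments_boilerplate{"strict"};
      Boilerplate arguments_boilerplate{"classic"};
    };

    // Mirrors the branch in Heap::AllocateArgumentsObject: strict-mode callees get
    // the strict boilerplate and its size constant, everything else the classic one.
    const Boilerplate* SelectArgumentsBoilerplate(const NativeContext& ctx,
                                                  bool strict_mode_callee,
                                                  int* size_out) {
      if (strict_mode_callee) {
        *size_out = 3;  // hypothetical stand-in for kArgumentsObjectSizeStrict
        return &ctx.strict_mode_arguments_boilerplate;
      }
      *size_out = 4;    // hypothetical stand-in for kArgumentsObjectSize
      return &ctx.arguments_boilerplate;
    }

    int main() {
      NativeContext ctx;
      int size;
      const Boilerplate* b = SelectArgumentsBoilerplate(ctx, true, &size);
      std::printf("%s %d\n", b->name, size);  // prints: strict 3
    }
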
4302 JSObject* jsobj = JSObject::cast(object); 4302 JSObject* jsobj = JSObject::cast(object);
4303 4303
4304 // Reinitialize the object from the constructor map. 4304 // Reinitialize the object from the constructor map.
4305 InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map); 4305 InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map);
4306 4306
4307 // Functions require some minimal initialization. 4307 // Functions require some minimal initialization.
4308 if (type == JS_FUNCTION_TYPE) { 4308 if (type == JS_FUNCTION_TYPE) {
4309 map->set_function_with_prototype(true); 4309 map->set_function_with_prototype(true);
4310 InitializeFunction(JSFunction::cast(object), shared, the_hole_value()); 4310 InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
4311 JSFunction::cast(object)->set_context( 4311 JSFunction::cast(object)->set_context(
4312 isolate()->context()->global_context()); 4312 isolate()->context()->native_context());
4313 } 4313 }
4314 4314
4315 // Put in filler if the new object is smaller than the old. 4315 // Put in filler if the new object is smaller than the old.
4316 if (size_difference > 0) { 4316 if (size_difference > 0) {
4317 CreateFillerObjectAt( 4317 CreateFillerObjectAt(
4318 object->address() + map->instance_size(), size_difference); 4318 object->address() + map->instance_size(), size_difference);
4319 } 4319 }
4320 4320
4321 return object; 4321 return object;
4322 } 4322 }
(...skipping 274 matching lines...)
4597 String::cast(result)->set_length(length); 4597 String::cast(result)->set_length(length);
4598 String::cast(result)->set_hash_field(String::kEmptyHashField); 4598 String::cast(result)->set_hash_field(String::kEmptyHashField);
4599 ASSERT_EQ(size, HeapObject::cast(result)->Size()); 4599 ASSERT_EQ(size, HeapObject::cast(result)->Size());
4600 return result; 4600 return result;
4601 } 4601 }
4602 4602
4603 4603
4604 MaybeObject* Heap::AllocateJSArray( 4604 MaybeObject* Heap::AllocateJSArray(
4605 ElementsKind elements_kind, 4605 ElementsKind elements_kind,
4606 PretenureFlag pretenure) { 4606 PretenureFlag pretenure) {
4607 Context* global_context = isolate()->context()->global_context(); 4607 Context* native_context = isolate()->context()->native_context();
4608 JSFunction* array_function = global_context->array_function(); 4608 JSFunction* array_function = native_context->array_function();
4609 Map* map = array_function->initial_map(); 4609 Map* map = array_function->initial_map();
4610 Object* maybe_map_array = global_context->js_array_maps(); 4610 Object* maybe_map_array = native_context->js_array_maps();
4611 if (!maybe_map_array->IsUndefined()) { 4611 if (!maybe_map_array->IsUndefined()) {
4612 Object* maybe_transitioned_map = 4612 Object* maybe_transitioned_map =
4613 FixedArray::cast(maybe_map_array)->get(elements_kind); 4613 FixedArray::cast(maybe_map_array)->get(elements_kind);
4614 if (!maybe_transitioned_map->IsUndefined()) { 4614 if (!maybe_transitioned_map->IsUndefined()) {
4615 map = Map::cast(maybe_transitioned_map); 4615 map = Map::cast(maybe_transitioned_map);
4616 } 4616 }
4617 } 4617 }
4618 4618
4619 return AllocateJSObjectFromMap(map, pretenure); 4619 return AllocateJSObjectFromMap(map, pretenure);
4620 } 4620 }
(...skipping 262 matching lines...)
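
Aside: AllocateJSArray above starts from the array function's initial map but, when the native context's js_array_maps cache is populated, prefers the map already transitioned to the requested elements kind. A small stand-alone sketch of that "cached transitioned map with fallback" lookup (hypothetical names; a plain array of pointers replaces the FixedArray, with nullptr playing "undefined"):

    #include <array>
    #include <cstdio>

    enum ElementsKind { FAST_SMI_ELEMENTS, FAST_ELEMENTS, FAST_DOUBLE_ELEMENTS, kKindCount };

    struct Map { const char* name; };

    // Stand-in for the native context: a per-kind map cache plus the array
    // function's initial map as the fallback.
    struct NativeContext {
      Map* initial_array_map;
      std::array<Map*, kKindCount> js_array_maps;  // nullptr entries play "undefined"
    };

    // Mirrors the map-selection logic in Heap::AllocateJSArray.
    Map* SelectArrayMap(const NativeContext& ctx, ElementsKind kind) {
      Map* map = ctx.initial_array_map;
      if (Map* transitioned = ctx.js_array_maps[kind]) map = transitioned;
      return map;
    }

    int main() {
      Map initial{"initial"}, doubles{"fast-double"};
      NativeContext ctx{&initial, {}};
      ctx.js_array_maps[FAST_DOUBLE_ELEMENTS] = &doubles;
      std::printf("%s %s\n", SelectArrayMap(ctx, FAST_ELEMENTS)->name,
                  SelectArrayMap(ctx, FAST_DOUBLE_ELEMENTS)->name);  // prints: initial fast-double
    }
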
4883 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); 4883 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
4884 if (!maybe_result->ToObject(&result)) return maybe_result; 4884 if (!maybe_result->ToObject(&result)) return maybe_result;
4885 } 4885 }
4886 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( 4886 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
4887 hash_table_map()); 4887 hash_table_map());
4888 ASSERT(result->IsHashTable()); 4888 ASSERT(result->IsHashTable());
4889 return result; 4889 return result;
4890 } 4890 }
4891 4891
4892 4892
4893 MaybeObject* Heap::AllocateGlobalContext() { 4893 MaybeObject* Heap::AllocateNativeContext() {
4894 Object* result; 4894 Object* result;
4895 { MaybeObject* maybe_result = 4895 { MaybeObject* maybe_result =
4896 AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS); 4896 AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS);
4897 if (!maybe_result->ToObject(&result)) return maybe_result; 4897 if (!maybe_result->ToObject(&result)) return maybe_result;
4898 } 4898 }
4899 Context* context = reinterpret_cast<Context*>(result); 4899 Context* context = reinterpret_cast<Context*>(result);
4900 context->set_map_no_write_barrier(global_context_map()); 4900 context->set_map_no_write_barrier(native_context_map());
4901 context->set_js_array_maps(undefined_value()); 4901 context->set_js_array_maps(undefined_value());
4902 ASSERT(context->IsGlobalContext()); 4902 ASSERT(context->IsNativeContext());
4903 ASSERT(result->IsContext()); 4903 ASSERT(result->IsContext());
4904 return result; 4904 return result;
4905 } 4905 }
4906 4906
4907 4907
4908 MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) { 4908 MaybeObject* Heap::AllocateModuleContext(ScopeInfo* scope_info) {
4909 Object* result; 4909 Object* result;
4910 { MaybeObject* maybe_result = 4910 { MaybeObject* maybe_result =
4911 AllocateFixedArray(scope_info->ContextLength(), TENURED); 4911 AllocateFixedArray(scope_info->ContextLength(), TENURED);
4912 if (!maybe_result->ToObject(&result)) return maybe_result; 4912 if (!maybe_result->ToObject(&result)) return maybe_result;
(...skipping 1243 matching lines...)
6156 } 6156 }
6157 6157
6158 if (create_heap_objects) { 6158 if (create_heap_objects) {
6159 // Create initial maps. 6159 // Create initial maps.
6160 if (!CreateInitialMaps()) return false; 6160 if (!CreateInitialMaps()) return false;
6161 if (!CreateApiObjects()) return false; 6161 if (!CreateApiObjects()) return false;
6162 6162
6163 // Create initial objects 6163 // Create initial objects
6164 if (!CreateInitialObjects()) return false; 6164 if (!CreateInitialObjects()) return false;
6165 6165
6166 global_contexts_list_ = undefined_value(); 6166 native_contexts_list_ = undefined_value();
6167 } 6167 }
6168 6168
6169 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); 6169 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6170 LOG(isolate_, IntPtrTEvent("heap-available", Available())); 6170 LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6171 6171
6172 store_buffer()->SetUp(); 6172 store_buffer()->SetUp();
6173 6173
6174 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); 6174 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
6175 6175
6176 return true; 6176 return true;
(...skipping 506 matching lines...)
6683 MarkVisitor mark_visitor(this); 6683 MarkVisitor mark_visitor(this);
6684 MarkRecursively(root, &mark_visitor); 6684 MarkRecursively(root, &mark_visitor);
6685 6685
6686 UnmarkVisitor unmark_visitor(this); 6686 UnmarkVisitor unmark_visitor(this);
6687 UnmarkRecursively(root, &unmark_visitor); 6687 UnmarkRecursively(root, &unmark_visitor);
6688 6688
6689 ProcessResults(); 6689 ProcessResults();
6690 } 6690 }
6691 6691
6692 6692
6693 static bool SafeIsGlobalContext(HeapObject* obj) { 6693 static bool SafeIsNativeContext(HeapObject* obj) {
6694 return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map(); 6694 return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
6695 } 6695 }
6696 6696
6697 6697
6698 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) { 6698 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
6699 if (!(*p)->IsHeapObject()) return; 6699 if (!(*p)->IsHeapObject()) return;
6700 6700
6701 HeapObject* obj = HeapObject::cast(*p); 6701 HeapObject* obj = HeapObject::cast(*p);
6702 6702
6703 Object* map = obj->map(); 6703 Object* map = obj->map();
6704 6704
6705 if (!map->IsHeapObject()) return; // visited before 6705 if (!map->IsHeapObject()) return; // visited before
6706 6706
6707 if (found_target_in_trace_) return; // stop if target found 6707 if (found_target_in_trace_) return; // stop if target found
6708 object_stack_.Add(obj); 6708 object_stack_.Add(obj);
6709 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) || 6709 if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
6710 (obj == search_target_)) { 6710 (obj == search_target_)) {
6711 found_target_in_trace_ = true; 6711 found_target_in_trace_ = true;
6712 found_target_ = true; 6712 found_target_ = true;
6713 return; 6713 return;
6714 } 6714 }
6715 6715
6716 bool is_global_context = SafeIsGlobalContext(obj); 6716 bool is_native_context = SafeIsNativeContext(obj);
6717 6717
6718 // not visited yet 6718 // not visited yet
6719 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map)); 6719 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
6720 6720
6721 Address map_addr = map_p->address(); 6721 Address map_addr = map_p->address();
6722 6722
6723 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag)); 6723 obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
6724 6724
6725 // Scan the object body. 6725 // Scan the object body.
6726 if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) { 6726 if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
6727 // This is specialized to scan Contexts properly. 6727 // This is specialized to scan Contexts properly.
6728 Object** start = reinterpret_cast<Object**>(obj->address() + 6728 Object** start = reinterpret_cast<Object**>(obj->address() +
6729 Context::kHeaderSize); 6729 Context::kHeaderSize);
6730 Object** end = reinterpret_cast<Object**>(obj->address() + 6730 Object** end = reinterpret_cast<Object**>(obj->address() +
6731 Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize); 6731 Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
6732 mark_visitor->VisitPointers(start, end); 6732 mark_visitor->VisitPointers(start, end);
6733 } else { 6733 } else {
6734 obj->IterateBody(map_p->instance_type(), 6734 obj->IterateBody(map_p->instance_type(),
6735 obj->SizeFromMap(map_p), 6735 obj->SizeFromMap(map_p),
6736 mark_visitor); 6736 mark_visitor);
(...skipping 525 matching lines...)
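
Aside: PathTracer::MarkRecursively above marks an object as visited by overwriting its map word with map_addr + kMarkTag; the earlier "visited before" check (`if (!map->IsHeapObject()) return;`) then skips it on a revisit, and UnmarkRecursively restores the original maps. A hedged stand-alone illustration of that low-bit pointer-tagging trick on an ordinary aligned pointer (not the real HeapObject layout; the tag value here is illustrative, and OR/AND-masking is used, which for aligned pointers has the same effect as the add in the patch):

    #include <cstdint>
    #include <cstdio>

    struct Map { int dummy; };

    constexpr std::uintptr_t kMarkTag = 2;  // illustrative value, same name as PathTracer's tag

    // Tag and untag an aligned Map* by toggling a low bit, mirroring how
    // PathTracer::MarkRecursively stores map_addr + kMarkTag into the map slot.
    inline Map* Mark(Map* map) {
      return reinterpret_cast<Map*>(reinterpret_cast<std::uintptr_t>(map) | kMarkTag);
    }
    inline bool IsMarked(Map* map) {
      return (reinterpret_cast<std::uintptr_t>(map) & kMarkTag) != 0;
    }
    inline Map* Unmark(Map* map) {
      return reinterpret_cast<Map*>(reinterpret_cast<std::uintptr_t>(map) & ~kMarkTag);
    }

    int main() {
      Map real_map{42};
      Map* slot = &real_map;        // the object's "map" slot
      slot = Mark(slot);            // visited
      std::printf("%d %d\n", IsMarked(slot), Unmark(slot)->dummy);  // prints: 1 42
    }
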
7262 static_cast<int>(object_sizes_last_time_[index])); 7262 static_cast<int>(object_sizes_last_time_[index]));
7263 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) 7263 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT)
7264 #undef ADJUST_LAST_TIME_OBJECT_COUNT 7264 #undef ADJUST_LAST_TIME_OBJECT_COUNT
7265 7265
7266 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); 7266 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_));
7267 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); 7267 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_));
7268 ClearObjectStats(); 7268 ClearObjectStats();
7269 } 7269 }
7270 7270
7271 } } // namespace v8::internal 7271 } } // namespace v8::internal
