Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index 8fc025338625aaed00cee4c5101c799296a24d84..e3e0e2a7df2da3c3cf34e2f3212ae340e6154379 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -175,7 +175,7 @@ Heap::Heap()
   }
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
-  global_contexts_list_ = NULL;
+  native_contexts_list_ = NULL;
   mark_compact_collector_.heap_ = this;
   external_string_table_.heap_ = this;
   // Put a dummy entry in the remembered pages so we can find the list the
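Note: native_contexts_list_ is the head of a heap-wide list that is threaded through the contexts themselves via a reserved slot, not kept in a side table. A minimal model of that intrusive-list shape, in plain C++ with stand-in types rather than V8's tagged Object* values (AddToContextList is a hypothetical helper):

    #include <cstdio>

    // Stand-ins for V8's Context/Heap; the real list links through a
    // reserved context slot and uses the undefined value, not NULL,
    // as its terminator once setup completes (see the SetUp hunk below).
    struct Context {
      const char* name;
      Context* next_context_link;  // threads the heap-wide list
    };

    struct Heap {
      Context* native_contexts_list = nullptr;  // the renamed field

      void AddToContextList(Context* c) {  // hypothetical helper
        c->next_context_link = native_contexts_list;
        native_contexts_list = c;
      }
    };

    int main() {
      Heap heap;
      Context a{"a", nullptr}, b{"b", nullptr};
      heap.AddToContextList(&a);
      heap.AddToContextList(&b);
      for (Context* c = heap.native_contexts_list; c; c = c->next_context_link)
        std::printf("%s\n", c->name);  // prints "b" then "a"
    }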
@@ -755,7 +755,7 @@ void Heap::EnsureFromSpaceIsCommitted() {
 void Heap::ClearJSFunctionResultCaches() {
   if (isolate_->bootstrapper()->IsActive()) return;
-  Object* context = global_contexts_list_;
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // Get the caches for this context. GC can happen when the context
     // is not fully initialized, so the caches can be undefined.
@@ -782,7 +782,7 @@ void Heap::ClearNormalizedMapCaches() {
     return;
   }
-  Object* context = global_contexts_list_;
+  Object* context = native_contexts_list_;
   while (!context->IsUndefined()) {
     // GC can happen when the context is not fully initialized,
     // so the cache can be undefined.
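Both cache-clearing loops above walk that list and must tolerate half-initialized contexts, since a GC can run before a context's cache slots are filled in. Roughly, as a sketch where a null pointer stands in for V8's undefined sentinel:

    struct FunctionResultCache {
      void Clear() {}
    };

    struct Context {
      Context* next_context_link = nullptr;
      // In V8 this slot holds either undefined or a cache object;
      // model that here as a nullable pointer.
      FunctionResultCache* jsfunction_result_cache = nullptr;
    };

    // Visit every context on the heap-wide list; skip caches that the
    // context has not installed yet ("the caches can be undefined").
    void ClearJSFunctionResultCaches(Context* head) {
      for (Context* c = head; c != nullptr; c = c->next_context_link) {
        if (c->jsfunction_result_cache != nullptr)
          c->jsfunction_result_cache->Clear();
      }
    }

    int main() {
      Context a, b;
      FunctionResultCache cache;
      b.next_context_link = &a;
      a.jsfunction_result_cache = &cache;  // b has no cache installed yet
      ClearJSFunctionResultCaches(&b);     // clears only a's cache
    }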
@@ -1295,8 +1295,8 @@ void Heap::Scavenge() {
     }
   }
-  // Scavenge object reachable from the global contexts list directly.
-  scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
+  // Scavenge objects reachable from the native contexts list directly.
+  scavenge_visitor.VisitPointer(BitCast<Object**>(&native_contexts_list_));
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
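During a scavenge the list head is simply visited as a strong slot, so the contexts survive minor GC unconditionally; the weak treatment happens only in ProcessWeakReferences below, after a full mark-compact. The shape of "visit one root slot", reduced to an interface sketch (not V8's actual visitor hierarchy):

    struct Object {};

    // Reduced visitor interface: the scavenger may move the object and
    // rewrites the slot in place, which is why it takes Object**.
    struct ObjectVisitor {
      virtual void VisitPointer(Object** slot) = 0;
      virtual ~ObjectVisitor() = default;
    };

    struct Heap {
      Object* native_contexts_list = nullptr;

      // Counterpart of the scavenge_visitor.VisitPointer(...) call above.
      void VisitContextListHead(ObjectVisitor* v) {
        v->VisitPointer(&native_contexts_list);
      }
    };

    struct NoOpVisitor : ObjectVisitor {
      void VisitPointer(Object** slot) override { (void)slot; }
    };

    int main() {
      Heap heap;
      NoOpVisitor v;
      heap.VisitContextListHead(&v);
    }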
@@ -1456,7 +1456,7 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* undefined = undefined_value();
   Object* head = undefined;
   Context* tail = NULL;
-  Object* candidate = global_contexts_list_;
+  Object* candidate = native_contexts_list_;
   // We don't record weak slots during marking or scavenges.
   // Instead we do it once when we complete a mark-compact cycle.
@@ -1529,7 +1529,7 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   }
   // Update the head of the list of contexts.
-  global_contexts_list_ = head;
+  native_contexts_list_ = head;
 }
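ProcessWeakReferences is where the list earns its weak semantics: after mark-compact, each candidate is offered to the WeakObjectRetainer, survivors are relinked, and dead contexts silently drop out. A compact sketch of that algorithm, with nulls instead of undefined and raw pointers instead of tagged values:

    struct Context {
      Context* next_context_link = nullptr;
    };

    // After marking, decides whether a context is still live; returns
    // the (possibly relocated) object, or nullptr to drop it.
    struct WeakObjectRetainer {
      virtual Context* RetainAs(Context* object) = 0;
      virtual ~WeakObjectRetainer() = default;
    };

    // Rebuilds the list, splicing out rejected contexts, and returns the
    // new head (the value stored back into native_contexts_list_ above).
    Context* ProcessWeakList(Context* candidate, WeakObjectRetainer* retainer) {
      Context* head = nullptr;
      Context* tail = nullptr;
      while (candidate != nullptr) {
        Context* next = candidate->next_context_link;  // save before relinking
        if (Context* retained = retainer->RetainAs(candidate)) {
          if (head == nullptr) head = retained;      // first survivor
          else tail->next_context_link = retained;   // splice after tail
          tail = retained;
        }
        candidate = next;
      }
      if (tail != nullptr) tail->next_context_link = nullptr;  // terminate
      return head;
    }

    struct KeepAll : WeakObjectRetainer {
      Context* RetainAs(Context* object) override { return object; }
    };

    int main() {
      Context a, b;
      b.next_context_link = &a;
      KeepAll retainer;
      return ProcessWeakList(&b, &retainer) == &b ? 0 : 1;
    }

Per the comment in the hunk, the real code additionally re-records the weak slots manually when a compacting cycle is already in progress, since the write barrier has no effect then.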
@@ -1655,7 +1655,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
-    table_.Register(kVisitGlobalContext,
+    table_.Register(kVisitNativeContext,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
                         template VisitSpecialized<Context::kSize>);
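kVisitNativeContext is an entry in a static dispatch table: each map carries a visitor id, and the GC visits an object by indexing a function table with that id, which is how contexts get the specialized Context::kSize treatment registered here. A toy version of the mechanism (names are illustrative, not V8's table machinery):

    #include <cstdio>

    enum VisitorId { kVisitFixedArray, kVisitNativeContext, kVisitorIdCount };

    struct HeapObject {
      VisitorId visitor_id;  // in V8 this lives on the object's map
    };

    using Callback = void (*)(HeapObject*);
    static Callback table_[kVisitorIdCount];

    static void EvacuateFixedArray(HeapObject*) { std::puts("fixed array"); }
    static void VisitNativeContext(HeapObject*) { std::puts("native context"); }

    int main() {
      // The Register(...) calls in the hunk populate exactly this kind
      // of table, once, at visitor setup.
      table_[kVisitFixedArray] = &EvacuateFixedArray;
      table_[kVisitNativeContext] = &VisitNativeContext;

      HeapObject ctx{kVisitNativeContext};
      table_[ctx.visitor_id](&ctx);  // indexed dispatch, no virtual calls
    }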
@@ -2441,10 +2441,10 @@ bool Heap::CreateInitialMaps() {
         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
     if (!maybe_obj->ToObject(&obj)) return false;
   }
-  Map* global_context_map = Map::cast(obj);
-  global_context_map->set_dictionary_map(true);
-  global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
-  set_global_context_map(global_context_map);
+  Map* native_context_map = Map::cast(obj);
+  native_context_map->set_dictionary_map(true);
+  native_context_map->set_visitor_id(StaticVisitorBase::kVisitNativeContext);
+  set_native_context_map(native_context_map);
   { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
                                          SharedFunctionInfo::kAlignedSize);
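The native context map is deliberately an ordinary FIXED_ARRAY_TYPE map with two tweaks, the dictionary_map flag and its own visitor id. A native context therefore has the same instance type as a fixed array, and only its map distinguishes it, which the SafeIsNativeContext hunk below relies on. Reduced model (field names are illustrative):

    #include <cassert>

    enum InstanceType { FIXED_ARRAY_TYPE };
    enum VisitorId { kVisitFixedArray, kVisitNativeContext };

    // A map describes the layout and GC treatment of objects using it.
    struct Map {
      InstanceType instance_type;
      bool dictionary_map = false;
      VisitorId visitor_id = kVisitFixedArray;
    };

    int main() {
      // Mirrors the hunk: allocate a fixed-array map, then retag it.
      Map native_context_map{FIXED_ARRAY_TYPE};
      native_context_map.dictionary_map = true;
      native_context_map.visitor_id = kVisitNativeContext;

      // Same instance type as any fixed array; distinct GC behavior.
      assert(native_context_map.instance_type == FIXED_ARRAY_TYPE);
      assert(native_context_map.visitor_id == kVisitNativeContext);
      return 0;
    }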
@@ -3723,7 +3723,7 @@ MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
   // from the function's context, since the function can be from a
   // different context.
   JSFunction* object_function =
-      function->context()->global_context()->object_function();
+      function->context()->native_context()->object_function();
   // Each function prototype gets a copy of the object function map.
   // This avoids unwanted sharing of maps between prototypes of different
@@ -3773,12 +3773,12 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
       !JSFunction::cast(callee)->shared()->is_classic_mode();
   if (strict_mode_callee) {
     boilerplate =
-        isolate()->context()->global_context()->
+        isolate()->context()->native_context()->
             strict_mode_arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSizeStrict;
   } else {
     boilerplate =
-        isolate()->context()->global_context()->arguments_boilerplate();
+        isolate()->context()->native_context()->arguments_boilerplate();
     arguments_object_size = kArgumentsObjectSize;
   }
@@ -4309,7 +4309,7 @@ MaybeObject* Heap::ReinitializeJSReceiver(
     map->set_function_with_prototype(true);
     InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
     JSFunction::cast(object)->set_context(
-        isolate()->context()->global_context());
+        isolate()->context()->native_context());
   }
   // Put in filler if the new object is smaller than the old.
@@ -4604,10 +4604,10 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
 MaybeObject* Heap::AllocateJSArray(
     ElementsKind elements_kind,
     PretenureFlag pretenure) {
-  Context* global_context = isolate()->context()->global_context();
-  JSFunction* array_function = global_context->array_function();
+  Context* native_context = isolate()->context()->native_context();
+  JSFunction* array_function = native_context->array_function();
   Map* map = array_function->initial_map();
-  Object* maybe_map_array = global_context->js_array_maps();
+  Object* maybe_map_array = native_context->js_array_maps();
   if (!maybe_map_array->IsUndefined()) {
     Object* maybe_transitioned_map =
         FixedArray::cast(maybe_map_array)->get(elements_kind);
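js_array_maps is a per-native-context cache: a fixed array of JSArray maps indexed by elements kind, letting allocation pick an already-transitioned map instead of starting from the array function's initial map every time. The lookup, sketched with raw pointers (null plays the role of undefined):

    enum ElementsKind { FAST_SMI_ELEMENTS, FAST_ELEMENTS, kElementsKindCount };

    struct Map {};

    struct NativeContext {
      Map** js_array_maps;              // null, or one cached map per kind
      Map* array_function_initial_map;  // fallback
    };

    // Mirrors AllocateJSArray's map selection above: prefer the cached
    // transitioned map for the requested kind, else the initial map.
    Map* SelectJSArrayMap(NativeContext* cx, ElementsKind kind) {
      Map* map = cx->array_function_initial_map;
      if (cx->js_array_maps != nullptr) {
        if (Map* transitioned = cx->js_array_maps[kind]) map = transitioned;
      }
      return map;
    }

    int main() {
      Map initial, fast;
      Map* cache[kElementsKindCount] = {nullptr, &fast};
      NativeContext cx{cache, &initial};
      return SelectJSArrayMap(&cx, FAST_ELEMENTS) == &fast ? 0 : 1;
    }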
@@ -4890,16 +4890,16 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
 }
-MaybeObject* Heap::AllocateGlobalContext() {
+MaybeObject* Heap::AllocateNativeContext() {
   Object* result;
   { MaybeObject* maybe_result =
-        AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
+        AllocateFixedArray(Context::NATIVE_CONTEXT_SLOTS);
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Context* context = reinterpret_cast<Context*>(result);
-  context->set_map_no_write_barrier(global_context_map());
+  context->set_map_no_write_barrier(native_context_map());
   context->set_js_array_maps(undefined_value());
-  ASSERT(context->IsGlobalContext());
+  ASSERT(context->IsNativeContext());
   ASSERT(result->IsContext());
   return result;
 }
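Allocation of a native context is then just "fixed array of NATIVE_CONTEXT_SLOTS, retagged with the special map", which the ASSERTs double-check. Modeled without V8's MaybeObject plumbing (the slot count below is made up, not V8's value):

    #include <cstdlib>

    struct Map {};
    static Map native_context_map;  // stand-in for Heap::native_context_map()

    constexpr int kNativeContextSlots = 64;  // illustrative only

    struct Context {
      Map* map;
      void* slots[kNativeContextSlots];
    };

    // Allocate the backing store, then swap in the native context map,
    // as the hunk does via set_map_no_write_barrier().
    Context* AllocateNativeContext() {
      Context* context =
          static_cast<Context*>(std::calloc(1, sizeof(Context)));
      if (context == nullptr) return nullptr;  // V8 signals retry-after-GC
      context->map = &native_context_map;
      return context;
    }

    int main() {
      Context* context = AllocateNativeContext();
      int ok = (context != nullptr && context->map == &native_context_map);
      std::free(context);
      return ok ? 0 : 1;
    }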
@@ -6163,7 +6163,7 @@ bool Heap::SetUp(bool create_heap_objects) {
     // Create initial objects
     if (!CreateInitialObjects()) return false;
-    global_contexts_list_ = undefined_value();
+    native_contexts_list_ = undefined_value();
   }
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
@@ -6690,8 +6690,8 @@ void PathTracer::TracePathFrom(Object** root) {
 }
-static bool SafeIsGlobalContext(HeapObject* obj) {
-  return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
+static bool SafeIsNativeContext(HeapObject* obj) {
+  return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
 }
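Because the native context map is installed only on native contexts, a raw pointer comparison on the map word is a sufficient type check here, and the "Safe"/"raw_unchecked" spelling reflects that PathTracer may be mutating map words elsewhere while this runs. The identity check in miniature:

    struct Map {};

    struct Heap {
      Map native_context_map;  // the singleton from CreateInitialMaps
    };

    struct HeapObject {
      Map* map;
      Heap* heap;
    };

    // Map identity doubles as a type check: pointer equality is enough.
    static bool SafeIsNativeContext(HeapObject* obj) {
      return obj->map == &obj->heap->native_context_map;
    }

    int main() {
      Heap heap;
      Map other;
      HeapObject context{&heap.native_context_map, &heap};
      HeapObject array{&other, &heap};
      return SafeIsNativeContext(&context) && !SafeIsNativeContext(&array)
                 ? 0 : 1;
    }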
@@ -6713,7 +6713,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
     return;
   }
-  bool is_global_context = SafeIsGlobalContext(obj);
+  bool is_native_context = SafeIsNativeContext(obj);
   // not visited yet
   Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
@@ -6723,7 +6723,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
   obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
   // Scan the object body.
-  if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
+  if (is_native_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
     // This is specialized to scan Contexts properly.
     Object** start = reinterpret_cast<Object**>(obj->address() +
                                                 Context::kHeaderSize);
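For reference, PathTracer's map_addr + kMarkTag trick marks an object by setting a low bit of its map word, relying on map pointers being aligned; the VISIT_ONLY_STRONG branch then scans the context's fields specially so that weak slots such as the next-context link are not followed. The tagging half, as a standalone sketch (the tag value is an assumption, not taken from the diff):

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kMarkTag = 2;  // assumed low tag bit

    struct Map {};

    struct HeapObject {
      Map* map;  // doubles as the mark word during tracing
    };

    bool IsMarked(const HeapObject* obj) {
      return (reinterpret_cast<uintptr_t>(obj->map) & kMarkTag) != 0;
    }

    void Mark(HeapObject* obj) {  // counterpart of set_map_no_write_barrier
      obj->map = reinterpret_cast<Map*>(
          reinterpret_cast<uintptr_t>(obj->map) + kMarkTag);
    }

    Map* RealMap(const HeapObject* obj) {  // strip the tag to recover the map
      return reinterpret_cast<Map*>(
          reinterpret_cast<uintptr_t>(obj->map) & ~kMarkTag);
    }

    int main() {
      alignas(8) static Map map;
      HeapObject obj{&map};
      assert(!IsMarked(&obj));
      Mark(&obj);
      assert(IsMarked(&obj) && RealMap(&obj) == &map);
      return 0;
    }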