| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1027 matching lines...) |
| 1038 return false; | 1038 return false; |
| 1039 } | 1039 } |
| 1040 | 1040 |
| 1041 | 1041 |
| 1042 MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) { | 1042 MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) { |
| 1043 #ifdef DEBUG | 1043 #ifdef DEBUG |
| 1044 // Do not attempt to flatten in debug mode when allocation is not | 1044 // Do not attempt to flatten in debug mode when allocation is not |
| 1045 // allowed. This is to avoid an assertion failure when allocating. | 1045 // allowed. This is to avoid an assertion failure when allocating. |
| 1046 // Flattening strings is the only case where we always allow | 1046 // Flattening strings is the only case where we always allow |
| 1047 // allocation because no GC is performed if the allocation fails. | 1047 // allocation because no GC is performed if the allocation fails. |
| 1048 if (!HEAP->IsAllocationAllowed()) return this; | 1048 if (!AllowHeapAllocation::IsAllowed()) return this; |
| 1049 #endif | 1049 #endif |
| 1050 | 1050 |
| 1051 Heap* heap = GetHeap(); | 1051 Heap* heap = GetHeap(); |
| 1052 switch (StringShape(this).representation_tag()) { | 1052 switch (StringShape(this).representation_tag()) { |
| 1053 case kConsStringTag: { | 1053 case kConsStringTag: { |
| 1054 ConsString* cs = ConsString::cast(this); | 1054 ConsString* cs = ConsString::cast(this); |
| 1055 if (cs->second()->length() == 0) { | 1055 if (cs->second()->length() == 0) { |
| 1056 return cs->first(); | 1056 return cs->first(); |
| 1057 } | 1057 } |
| 1058 // There's little point in putting the flat string in new space if the | 1058 // There's little point in putting the flat string in new space if the |
| (...skipping 1562 matching lines...) |
| 2621 return maybe_map; | 2621 return maybe_map; |
| 2622 } | 2622 } |
| 2623 } | 2623 } |
| 2624 | 2624 |
| 2625 new_map->set_owns_descriptors(true); | 2625 new_map->set_owns_descriptors(true); |
| 2626 return new_map; | 2626 return new_map; |
| 2627 } | 2627 } |
| 2628 | 2628 |
| 2629 | 2629 |
| 2630 Map* Map::CurrentMapForDeprecated() { | 2630 Map* Map::CurrentMapForDeprecated() { |
| 2631 AssertNoAllocation no_allocation; | 2631 DisallowHeapAllocation no_allocation; |
| 2632 if (!is_deprecated()) return this; | 2632 if (!is_deprecated()) return this; |
| 2633 | 2633 |
| 2634 DescriptorArray* old_descriptors = instance_descriptors(); | 2634 DescriptorArray* old_descriptors = instance_descriptors(); |
| 2635 | 2635 |
| 2636 int descriptors = NumberOfOwnDescriptors(); | 2636 int descriptors = NumberOfOwnDescriptors(); |
| 2637 Map* root_map = FindRootMap(); | 2637 Map* root_map = FindRootMap(); |
| 2638 | 2638 |
| 2639 // Check the state of the root map. | 2639 // Check the state of the root map. |
| 2640 if (!EquivalentToForTransition(root_map)) return NULL; | 2640 if (!EquivalentToForTransition(root_map)) return NULL; |
| 2641 int verbatim = root_map->NumberOfOwnDescriptors(); | 2641 int verbatim = root_map->NumberOfOwnDescriptors(); |
| (...skipping 2552 matching lines...) |
| 5194 if (!key->IsUndefined()) return true; | 5194 if (!key->IsUndefined()) return true; |
| 5195 } | 5195 } |
| 5196 return false; | 5196 return false; |
| 5197 } | 5197 } |
| 5198 | 5198 |
| 5199 | 5199 |
| 5200 // Check whether this object references another object. | 5200 // Check whether this object references another object. |
| 5201 bool JSObject::ReferencesObject(Object* obj) { | 5201 bool JSObject::ReferencesObject(Object* obj) { |
| 5202 Map* map_of_this = map(); | 5202 Map* map_of_this = map(); |
| 5203 Heap* heap = GetHeap(); | 5203 Heap* heap = GetHeap(); |
| 5204 AssertNoAllocation no_alloc; | 5204 DisallowHeapAllocation no_allocation; |
| 5205 | 5205 |
| 5206 // Is the object the constructor for this object? | 5206 // Is the object the constructor for this object? |
| 5207 if (map_of_this->constructor() == obj) { | 5207 if (map_of_this->constructor() == obj) { |
| 5208 return true; | 5208 return true; |
| 5209 } | 5209 } |
| 5210 | 5210 |
| 5211 // Is the object the prototype for this object? | 5211 // Is the object the prototype for this object? |
| 5212 if (map_of_this->prototype() == obj) { | 5212 if (map_of_this->prototype() == obj) { |
| 5213 return true; | 5213 return true; |
| 5214 } | 5214 } |
| (...skipping 2323 matching lines...) |
| 7538 | 7538 |
| 7539 MaybeObject* FixedArray::CopySize(int new_length) { | 7539 MaybeObject* FixedArray::CopySize(int new_length) { |
| 7540 Heap* heap = GetHeap(); | 7540 Heap* heap = GetHeap(); |
| 7541 if (new_length == 0) return heap->empty_fixed_array(); | 7541 if (new_length == 0) return heap->empty_fixed_array(); |
| 7542 Object* obj; | 7542 Object* obj; |
| 7543 { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length); | 7543 { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length); |
| 7544 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 7544 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 7545 } | 7545 } |
| 7546 FixedArray* result = FixedArray::cast(obj); | 7546 FixedArray* result = FixedArray::cast(obj); |
| 7547 // Copy the content | 7547 // Copy the content |
| 7548 AssertNoAllocation no_gc; | 7548 DisallowHeapAllocation no_gc; |
| 7549 int len = length(); | 7549 int len = length(); |
| 7550 if (new_length < len) len = new_length; | 7550 if (new_length < len) len = new_length; |
| 7551 // We are taking the map from the old fixed array so the map is sure to | 7551 // We are taking the map from the old fixed array so the map is sure to |
| 7552 // be an immortal immutable object. | 7552 // be an immortal immutable object. |
| 7553 result->set_map_no_write_barrier(map()); | 7553 result->set_map_no_write_barrier(map()); |
| 7554 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); | 7554 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); |
| 7555 for (int i = 0; i < len; i++) { | 7555 for (int i = 0; i < len; i++) { |
| 7556 result->set(i, get(i), mode); | 7556 result->set(i, get(i), mode); |
| 7557 } | 7557 } |
| 7558 return result; | 7558 return result; |
| 7559 } | 7559 } |
| 7560 | 7560 |
| 7561 | 7561 |
| 7562 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) { | 7562 void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) { |
| 7563 AssertNoAllocation no_gc; | 7563 DisallowHeapAllocation no_gc; |
| 7564 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc); | 7564 WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc); |
| 7565 for (int index = 0; index < len; index++) { | 7565 for (int index = 0; index < len; index++) { |
| 7566 dest->set(dest_pos+index, get(pos+index), mode); | 7566 dest->set(dest_pos+index, get(pos+index), mode); |
| 7567 } | 7567 } |
| 7568 } | 7568 } |
| 7569 | 7569 |
| 7570 | 7570 |
| 7571 #ifdef DEBUG | 7571 #ifdef DEBUG |
| 7572 bool FixedArray::IsEqualTo(FixedArray* other) { | 7572 bool FixedArray::IsEqualTo(FixedArray* other) { |
| 7573 if (length() != other->length()) return false; | 7573 if (length() != other->length()) return false; |
| (...skipping 263 matching lines...) |
| 7837 #endif | 7837 #endif |
| 7838 | 7838 |
| 7839 | 7839 |
| 7840 bool String::LooksValid() { | 7840 bool String::LooksValid() { |
| 7841 if (!Isolate::Current()->heap()->Contains(this)) return false; | 7841 if (!Isolate::Current()->heap()->Contains(this)) return false; |
| 7842 return true; | 7842 return true; |
| 7843 } | 7843 } |
| 7844 | 7844 |
| 7845 | 7845 |
| 7846 String::FlatContent String::GetFlatContent() { | 7846 String::FlatContent String::GetFlatContent() { |
| 7847 ASSERT(!GetHeap()->allow_allocation(false)); | 7847 ASSERT(!AllowHeapAllocation::IsAllowed()); |
| 7848 int length = this->length(); | 7848 int length = this->length(); |
| 7849 StringShape shape(this); | 7849 StringShape shape(this); |
| 7850 String* string = this; | 7850 String* string = this; |
| 7851 int offset = 0; | 7851 int offset = 0; |
| 7852 if (shape.representation_tag() == kConsStringTag) { | 7852 if (shape.representation_tag() == kConsStringTag) { |
| 7853 ConsString* cons = ConsString::cast(string); | 7853 ConsString* cons = ConsString::cast(string); |
| 7854 if (cons->second()->length() != 0) { | 7854 if (cons->second()->length() != 0) { |
| 7855 return FlatContent(); | 7855 return FlatContent(); |
| 7856 } | 7856 } |
| 7857 string = cons->first(); | 7857 string = cons->first(); |
| (...skipping 206 matching lines...) |
| 8064 str_(0), | 8064 str_(0), |
| 8065 is_ascii_(true), | 8065 is_ascii_(true), |
| 8066 length_(input.length()), | 8066 length_(input.length()), |
| 8067 start_(input.start()) { } | 8067 start_(input.start()) { } |
| 8068 | 8068 |
| 8069 | 8069 |
| 8070 void FlatStringReader::PostGarbageCollection() { | 8070 void FlatStringReader::PostGarbageCollection() { |
| 8071 if (str_ == NULL) return; | 8071 if (str_ == NULL) return; |
| 8072 Handle<String> str(str_); | 8072 Handle<String> str(str_); |
| 8073 ASSERT(str->IsFlat()); | 8073 ASSERT(str->IsFlat()); |
| 8074 AssertNoAllocation no_gc; | 8074 DisallowHeapAllocation no_gc; |
| 8075 // This does not actually prevent the vector from being relocated later. | 8075 // This does not actually prevent the vector from being relocated later. |
| 8076 String::FlatContent content = str->GetFlatContent(); | 8076 String::FlatContent content = str->GetFlatContent(); |
| 8077 ASSERT(content.IsFlat()); | 8077 ASSERT(content.IsFlat()); |
| 8078 is_ascii_ = content.IsAscii(); | 8078 is_ascii_ = content.IsAscii(); |
| 8079 if (is_ascii_) { | 8079 if (is_ascii_) { |
| 8080 start_ = content.ToOneByteVector().start(); | 8080 start_ = content.ToOneByteVector().start(); |
| 8081 } else { | 8081 } else { |
| 8082 start_ = content.ToUC16Vector().start(); | 8082 start_ = content.ToUC16Vector().start(); |
| 8083 } | 8083 } |
| 8084 } | 8084 } |
| (...skipping 532 matching lines...) |
| 8617 utf8_data += cursor; | 8617 utf8_data += cursor; |
| 8618 remaining_in_str -= cursor; | 8618 remaining_in_str -= cursor; |
| 8619 } | 8619 } |
| 8620 return (allow_prefix_match || i == slen) && remaining_in_str == 0; | 8620 return (allow_prefix_match || i == slen) && remaining_in_str == 0; |
| 8621 } | 8621 } |
| 8622 | 8622 |
| 8623 | 8623 |
| 8624 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) { | 8624 bool String::IsOneByteEqualTo(Vector<const uint8_t> str) { |
| 8625 int slen = length(); | 8625 int slen = length(); |
| 8626 if (str.length() != slen) return false; | 8626 if (str.length() != slen) return false; |
| 8627 AssertNoAllocation no_gc; | 8627 DisallowHeapAllocation no_gc; |
| 8628 FlatContent content = GetFlatContent(); | 8628 FlatContent content = GetFlatContent(); |
| 8629 if (content.IsAscii()) { | 8629 if (content.IsAscii()) { |
| 8630 return CompareChars(content.ToOneByteVector().start(), | 8630 return CompareChars(content.ToOneByteVector().start(), |
| 8631 str.start(), slen) == 0; | 8631 str.start(), slen) == 0; |
| 8632 } | 8632 } |
| 8633 for (int i = 0; i < slen; i++) { | 8633 for (int i = 0; i < slen; i++) { |
| 8634 if (Get(i) != static_cast<uint16_t>(str[i])) return false; | 8634 if (Get(i) != static_cast<uint16_t>(str[i])) return false; |
| 8635 } | 8635 } |
| 8636 return true; | 8636 return true; |
| 8637 } | 8637 } |
| 8638 | 8638 |
| 8639 | 8639 |
| 8640 bool String::IsTwoByteEqualTo(Vector<const uc16> str) { | 8640 bool String::IsTwoByteEqualTo(Vector<const uc16> str) { |
| 8641 int slen = length(); | 8641 int slen = length(); |
| 8642 if (str.length() != slen) return false; | 8642 if (str.length() != slen) return false; |
| 8643 AssertNoAllocation no_gc; | 8643 DisallowHeapAllocation no_gc; |
| 8644 FlatContent content = GetFlatContent(); | 8644 FlatContent content = GetFlatContent(); |
| 8645 if (content.IsTwoByte()) { | 8645 if (content.IsTwoByte()) { |
| 8646 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0; | 8646 return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0; |
| 8647 } | 8647 } |
| 8648 for (int i = 0; i < slen; i++) { | 8648 for (int i = 0; i < slen; i++) { |
| 8649 if (Get(i) != str[i]) return false; | 8649 if (Get(i) != str[i]) return false; |
| 8650 } | 8650 } |
| 8651 return true; | 8651 return true; |
| 8652 } | 8652 } |
| 8653 | 8653 |
| (...skipping 1110 matching lines...) |
| 9764 if (length != recompiled_relocation->length()) return false; | 9764 if (length != recompiled_relocation->length()) return false; |
| 9765 int compare = memcmp(code_relocation->GetDataStartAddress(), | 9765 int compare = memcmp(code_relocation->GetDataStartAddress(), |
| 9766 recompiled_relocation->GetDataStartAddress(), | 9766 recompiled_relocation->GetDataStartAddress(), |
| 9767 length); | 9767 length); |
| 9768 return compare == 0; | 9768 return compare == 0; |
| 9769 } | 9769 } |
| 9770 | 9770 |
| 9771 | 9771 |
| 9772 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { | 9772 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { |
| 9773 ASSERT(!has_deoptimization_support()); | 9773 ASSERT(!has_deoptimization_support()); |
| 9774 AssertNoAllocation no_allocation; | 9774 DisallowHeapAllocation no_allocation; |
| 9775 Code* code = this->code(); | 9775 Code* code = this->code(); |
| 9776 if (IsCodeEquivalent(code, recompiled)) { | 9776 if (IsCodeEquivalent(code, recompiled)) { |
| 9777 // Copy the deoptimization data from the recompiled code. | 9777 // Copy the deoptimization data from the recompiled code. |
| 9778 code->set_deoptimization_data(recompiled->deoptimization_data()); | 9778 code->set_deoptimization_data(recompiled->deoptimization_data()); |
| 9779 code->set_has_deoptimization_support(true); | 9779 code->set_has_deoptimization_support(true); |
| 9780 } else { | 9780 } else { |
| 9781 // TODO(3025757): In case the recompiled isn't equivalent to the | 9781 // TODO(3025757): In case the recompiled isn't equivalent to the |
| 9782 // old code, we have to replace it. We should try to avoid this | 9782 // old code, we have to replace it. We should try to avoid this |
| 9783 // altogether because it flushes valuable type feedback by | 9783 // altogether because it flushes valuable type feedback by |
| 9784 // effectively resetting all IC state. | 9784 // effectively resetting all IC state. |
| (...skipping 296 matching lines...) |
| 10081 | 10081 |
| 10082 // unbox handles and relocate | 10082 // unbox handles and relocate |
| 10083 intptr_t delta = instruction_start() - desc.buffer; | 10083 intptr_t delta = instruction_start() - desc.buffer; |
| 10084 int mode_mask = RelocInfo::kCodeTargetMask | | 10084 int mode_mask = RelocInfo::kCodeTargetMask | |
| 10085 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | | 10085 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | |
| 10086 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | | 10086 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) | |
| 10087 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | | 10087 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | |
| 10088 RelocInfo::kApplyMask; | 10088 RelocInfo::kApplyMask; |
| 10089 // Needed to find target_object and runtime_entry on X64 | 10089 // Needed to find target_object and runtime_entry on X64 |
| 10090 Assembler* origin = desc.origin; | 10090 Assembler* origin = desc.origin; |
| 10091 ALLOW_HANDLE_DEREF(GetIsolate(), "embedding raw addresses into code"); | 10091 AllowDeferredHandleDereference embedding_raw_address; |
| 10092 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { | 10092 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { |
| 10093 RelocInfo::Mode mode = it.rinfo()->rmode(); | 10093 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 10094 if (mode == RelocInfo::EMBEDDED_OBJECT) { | 10094 if (mode == RelocInfo::EMBEDDED_OBJECT) { |
| 10095 Handle<Object> p = it.rinfo()->target_object_handle(origin); | 10095 Handle<Object> p = it.rinfo()->target_object_handle(origin); |
| 10096 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER); | 10096 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER); |
| 10097 } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) { | 10097 } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) { |
| 10098 Handle<JSGlobalPropertyCell> cell = it.rinfo()->target_cell_handle(); | 10098 Handle<JSGlobalPropertyCell> cell = it.rinfo()->target_cell_handle(); |
| 10099 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER); | 10099 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER); |
| 10100 } else if (RelocInfo::IsCodeTarget(mode)) { | 10100 } else if (RelocInfo::IsCodeTarget(mode)) { |
| 10101 // rewrite code handles in inline cache targets to direct | 10101 // rewrite code handles in inline cache targets to direct |
| (...skipping 70 matching lines...) |
| 10172 | 10172 |
| 10173 | 10173 |
| 10174 SafepointEntry Code::GetSafepointEntry(Address pc) { | 10174 SafepointEntry Code::GetSafepointEntry(Address pc) { |
| 10175 SafepointTable table(this); | 10175 SafepointTable table(this); |
| 10176 return table.FindEntry(pc); | 10176 return table.FindEntry(pc); |
| 10177 } | 10177 } |
| 10178 | 10178 |
| 10179 | 10179 |
| 10180 Map* Code::FindFirstMap() { | 10180 Map* Code::FindFirstMap() { |
| 10181 ASSERT(is_inline_cache_stub()); | 10181 ASSERT(is_inline_cache_stub()); |
| 10182 AssertNoAllocation no_allocation; | 10182 DisallowHeapAllocation no_allocation; |
| 10183 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 10183 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 10184 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10184 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10185 RelocInfo* info = it.rinfo(); | 10185 RelocInfo* info = it.rinfo(); |
| 10186 Object* object = info->target_object(); | 10186 Object* object = info->target_object(); |
| 10187 if (object->IsMap()) return Map::cast(object); | 10187 if (object->IsMap()) return Map::cast(object); |
| 10188 } | 10188 } |
| 10189 return NULL; | 10189 return NULL; |
| 10190 } | 10190 } |
| 10191 | 10191 |
| 10192 | 10192 |
| 10193 void Code::ReplaceFirstMap(Map* replace_with) { | 10193 void Code::ReplaceFirstMap(Map* replace_with) { |
| 10194 ASSERT(is_inline_cache_stub()); | 10194 ASSERT(is_inline_cache_stub()); |
| 10195 AssertNoAllocation no_allocation; | 10195 DisallowHeapAllocation no_allocation; |
| 10196 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 10196 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 10197 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10197 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10198 RelocInfo* info = it.rinfo(); | 10198 RelocInfo* info = it.rinfo(); |
| 10199 Object* object = info->target_object(); | 10199 Object* object = info->target_object(); |
| 10200 if (object->IsMap()) { | 10200 if (object->IsMap()) { |
| 10201 info->set_target_object(replace_with); | 10201 info->set_target_object(replace_with); |
| 10202 return; | 10202 return; |
| 10203 } | 10203 } |
| 10204 } | 10204 } |
| 10205 UNREACHABLE(); | 10205 UNREACHABLE(); |
| 10206 } | 10206 } |
| 10207 | 10207 |
| 10208 | 10208 |
| 10209 void Code::FindAllMaps(MapHandleList* maps) { | 10209 void Code::FindAllMaps(MapHandleList* maps) { |
| 10210 ASSERT(is_inline_cache_stub()); | 10210 ASSERT(is_inline_cache_stub()); |
| 10211 AssertNoAllocation no_allocation; | 10211 DisallowHeapAllocation no_allocation; |
| 10212 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 10212 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 10213 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10213 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10214 RelocInfo* info = it.rinfo(); | 10214 RelocInfo* info = it.rinfo(); |
| 10215 Object* object = info->target_object(); | 10215 Object* object = info->target_object(); |
| 10216 if (object->IsMap()) maps->Add(Handle<Map>(Map::cast(object))); | 10216 if (object->IsMap()) maps->Add(Handle<Map>(Map::cast(object))); |
| 10217 } | 10217 } |
| 10218 } | 10218 } |
| 10219 | 10219 |
| 10220 | 10220 |
| 10221 Code* Code::FindFirstCode() { | 10221 Code* Code::FindFirstCode() { |
| 10222 ASSERT(is_inline_cache_stub()); | 10222 ASSERT(is_inline_cache_stub()); |
| 10223 AssertNoAllocation no_allocation; | 10223 DisallowHeapAllocation no_allocation; |
| 10224 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); | 10224 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); |
| 10225 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10225 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10226 RelocInfo* info = it.rinfo(); | 10226 RelocInfo* info = it.rinfo(); |
| 10227 return Code::GetCodeFromTargetAddress(info->target_address()); | 10227 return Code::GetCodeFromTargetAddress(info->target_address()); |
| 10228 } | 10228 } |
| 10229 return NULL; | 10229 return NULL; |
| 10230 } | 10230 } |
| 10231 | 10231 |
| 10232 | 10232 |
| 10233 void Code::FindAllCode(CodeHandleList* code_list, int length) { | 10233 void Code::FindAllCode(CodeHandleList* code_list, int length) { |
| 10234 ASSERT(is_inline_cache_stub()); | 10234 ASSERT(is_inline_cache_stub()); |
| 10235 AssertNoAllocation no_allocation; | 10235 DisallowHeapAllocation no_allocation; |
| 10236 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); | 10236 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); |
| 10237 int i = 0; | 10237 int i = 0; |
| 10238 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10238 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10239 if (i++ == length) return; | 10239 if (i++ == length) return; |
| 10240 RelocInfo* info = it.rinfo(); | 10240 RelocInfo* info = it.rinfo(); |
| 10241 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); | 10241 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); |
| 10242 ASSERT(code->kind() == Code::STUB); | 10242 ASSERT(code->kind() == Code::STUB); |
| 10243 code_list->Add(Handle<Code>(code)); | 10243 code_list->Add(Handle<Code>(code)); |
| 10244 } | 10244 } |
| 10245 UNREACHABLE(); | 10245 UNREACHABLE(); |
| 10246 } | 10246 } |
| 10247 | 10247 |
| 10248 | 10248 |
| 10249 Name* Code::FindFirstName() { | 10249 Name* Code::FindFirstName() { |
| 10250 ASSERT(is_inline_cache_stub()); | 10250 ASSERT(is_inline_cache_stub()); |
| 10251 AssertNoAllocation no_allocation; | 10251 DisallowHeapAllocation no_allocation; |
| 10252 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 10252 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 10253 for (RelocIterator it(this, mask); !it.done(); it.next()) { | 10253 for (RelocIterator it(this, mask); !it.done(); it.next()) { |
| 10254 RelocInfo* info = it.rinfo(); | 10254 RelocInfo* info = it.rinfo(); |
| 10255 Object* object = info->target_object(); | 10255 Object* object = info->target_object(); |
| 10256 if (object->IsName()) return Name::cast(object); | 10256 if (object->IsName()) return Name::cast(object); |
| 10257 } | 10257 } |
| 10258 return NULL; | 10258 return NULL; |
| 10259 } | 10259 } |
| 10260 | 10260 |
| 10261 | 10261 |
| (...skipping 866 matching lines...) |
| 11128 public: | 11128 public: |
| 11129 virtual bool TakeFunction(JSFunction* function) { | 11129 virtual bool TakeFunction(JSFunction* function) { |
| 11130 return function->code()->marked_for_deoptimization(); | 11130 return function->code()->marked_for_deoptimization(); |
| 11131 } | 11131 } |
| 11132 }; | 11132 }; |
| 11133 | 11133 |
| 11134 | 11134 |
| 11135 void DependentCode::DeoptimizeDependentCodeGroup( | 11135 void DependentCode::DeoptimizeDependentCodeGroup( |
| 11136 Isolate* isolate, | 11136 Isolate* isolate, |
| 11137 DependentCode::DependencyGroup group) { | 11137 DependentCode::DependencyGroup group) { |
| 11138 AssertNoAllocation no_allocation_scope; | 11138 DisallowHeapAllocation no_allocation_scope; |
| 11139 DependentCode::GroupStartIndexes starts(this); | 11139 DependentCode::GroupStartIndexes starts(this); |
| 11140 int start = starts.at(group); | 11140 int start = starts.at(group); |
| 11141 int end = starts.at(group + 1); | 11141 int end = starts.at(group + 1); |
| 11142 int number_of_entries = starts.at(DependentCode::kGroupCount); | 11142 int number_of_entries = starts.at(DependentCode::kGroupCount); |
| 11143 if (start == end) return; | 11143 if (start == end) return; |
| 11144 for (int i = start; i < end; i++) { | 11144 for (int i = start; i < end; i++) { |
| 11145 Code* code = code_at(i); | 11145 Code* code = code_at(i); |
| 11146 code->set_marked_for_deoptimization(true); | 11146 code->set_marked_for_deoptimization(true); |
| 11147 } | 11147 } |
| 11148 // Compact the array by moving all subsequent groups to fill in the new holes. | 11148 // Compact the array by moving all subsequent groups to fill in the new holes. |
| (...skipping 1398 matching lines...) |
| 12547 } | 12547 } |
| 12548 } | 12548 } |
| 12549 } | 12549 } |
| 12550 #endif | 12550 #endif |
| 12551 | 12551 |
| 12552 | 12552 |
| 12553 template<typename Shape, typename Key> | 12553 template<typename Shape, typename Key> |
| 12554 void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) { | 12554 void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) { |
| 12555 int pos = 0; | 12555 int pos = 0; |
| 12556 int capacity = HashTable<Shape, Key>::Capacity(); | 12556 int capacity = HashTable<Shape, Key>::Capacity(); |
| 12557 AssertNoAllocation no_gc; | 12557 DisallowHeapAllocation no_gc; |
| 12558 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc); | 12558 WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc); |
| 12559 for (int i = 0; i < capacity; i++) { | 12559 for (int i = 0; i < capacity; i++) { |
| 12560 Object* k = Dictionary<Shape, Key>::KeyAt(i); | 12560 Object* k = Dictionary<Shape, Key>::KeyAt(i); |
| 12561 if (Dictionary<Shape, Key>::IsKey(k)) { | 12561 if (Dictionary<Shape, Key>::IsKey(k)) { |
| 12562 elements->set(pos++, ValueAt(i), mode); | 12562 elements->set(pos++, ValueAt(i), mode); |
| 12563 } | 12563 } |
| 12564 } | 12564 } |
| 12565 ASSERT(pos == elements->length()); | 12565 ASSERT(pos == elements->length()); |
| 12566 } | 12566 } |
| 12567 | 12567 |
| (...skipping 860 matching lines...) |
| 13428 entry = NextProbe(entry, count++, capacity); | 13428 entry = NextProbe(entry, count++, capacity); |
| 13429 } | 13429 } |
| 13430 return kNotFound; | 13430 return kNotFound; |
| 13431 } | 13431 } |
| 13432 | 13432 |
| 13433 | 13433 |
| 13434 template<typename Shape, typename Key> | 13434 template<typename Shape, typename Key> |
| 13435 MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) { | 13435 MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) { |
| 13436 ASSERT(NumberOfElements() < new_table->Capacity()); | 13436 ASSERT(NumberOfElements() < new_table->Capacity()); |
| 13437 | 13437 |
| 13438 AssertNoAllocation no_gc; | 13438 DisallowHeapAllocation no_gc; |
| 13439 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc); | 13439 WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc); |
| 13440 | 13440 |
| 13441 // Copy prefix to new array. | 13441 // Copy prefix to new array. |
| 13442 for (int i = kPrefixStartIndex; | 13442 for (int i = kPrefixStartIndex; |
| 13443 i < kPrefixStartIndex + Shape::kPrefixSize; | 13443 i < kPrefixStartIndex + Shape::kPrefixSize; |
| 13444 i++) { | 13444 i++) { |
| 13445 new_table->set(i, get(i), mode); | 13445 new_table->set(i, get(i), mode); |
| 13446 } | 13446 } |
| 13447 | 13447 |
| 13448 // Rehash the elements. | 13448 // Rehash the elements. |
| (...skipping 222 matching lines...) |
| 13671 result_double = HeapNumber::cast(new_double); | 13671 result_double = HeapNumber::cast(new_double); |
| 13672 } | 13672 } |
| 13673 | 13673 |
| 13674 Object* obj; | 13674 Object* obj; |
| 13675 { MaybeObject* maybe_obj = | 13675 { MaybeObject* maybe_obj = |
| 13676 SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements()); | 13676 SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements()); |
| 13677 if (!maybe_obj->ToObject(&obj)) return maybe_obj; | 13677 if (!maybe_obj->ToObject(&obj)) return maybe_obj; |
| 13678 } | 13678 } |
| 13679 SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj); | 13679 SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj); |
| 13680 | 13680 |
| 13681 AssertNoAllocation no_alloc; | 13681 DisallowHeapAllocation no_alloc; |
| 13682 | 13682 |
| 13683 uint32_t pos = 0; | 13683 uint32_t pos = 0; |
| 13684 uint32_t undefs = 0; | 13684 uint32_t undefs = 0; |
| 13685 int capacity = dict->Capacity(); | 13685 int capacity = dict->Capacity(); |
| 13686 for (int i = 0; i < capacity; i++) { | 13686 for (int i = 0; i < capacity; i++) { |
| 13687 Object* k = dict->KeyAt(i); | 13687 Object* k = dict->KeyAt(i); |
| 13688 if (dict->IsKey(k)) { | 13688 if (dict->IsKey(k)) { |
| 13689 ASSERT(k->IsNumber()); | 13689 ASSERT(k->IsNumber()); |
| 13690 ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0); | 13690 ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0); |
| 13691 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); | 13691 ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0); |
| (...skipping 151 matching lines...) |
| 13843 } | 13843 } |
| 13844 } | 13844 } |
| 13845 } | 13845 } |
| 13846 result = holes; | 13846 result = holes; |
| 13847 while (holes < limit) { | 13847 while (holes < limit) { |
| 13848 elements->set_the_hole(holes); | 13848 elements->set_the_hole(holes); |
| 13849 holes++; | 13849 holes++; |
| 13850 } | 13850 } |
| 13851 } else { | 13851 } else { |
| 13852 FixedArray* elements = FixedArray::cast(elements_base); | 13852 FixedArray* elements = FixedArray::cast(elements_base); |
| 13853 AssertNoAllocation no_alloc; | 13853 DisallowHeapAllocation no_gc; |
| 13854 | 13854 |
| 13855 // Split elements into defined, undefined and the_hole, in that order. Only | 13855 // Split elements into defined, undefined and the_hole, in that order. Only |
| 13856 // count locations for undefined and the hole, and fill them afterwards. | 13856 // count locations for undefined and the hole, and fill them afterwards. |
| 13857 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc); | 13857 WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc); |
| 13858 unsigned int undefs = limit; | 13858 unsigned int undefs = limit; |
| 13859 unsigned int holes = limit; | 13859 unsigned int holes = limit; |
| 13860 // Assume most arrays contain no holes and undefined values, so minimize the | 13860 // Assume most arrays contain no holes and undefined values, so minimize the |
| 13861 // number of stores of non-undefined, non-the-hole values. | 13861 // number of stores of non-undefined, non-the-hole values. |
| 13862 for (unsigned int i = 0; i < undefs; i++) { | 13862 for (unsigned int i = 0; i < undefs; i++) { |
| 13863 Object* current = elements->get(i); | 13863 Object* current = elements->get(i); |
| 13864 if (current->IsTheHole()) { | 13864 if (current->IsTheHole()) { |
| 13865 holes--; | 13865 holes--; |
| 13866 undefs--; | 13866 undefs--; |
| 13867 } else if (current->IsUndefined()) { | 13867 } else if (current->IsUndefined()) { |
| (...skipping 1367 matching lines...) |
| 15235 int previous_length = | 15235 int previous_length = |
| 15236 previous.is_null() ? 0 : previous->serialized_data()->length(); | 15236 previous.is_null() ? 0 : previous->serialized_data()->length(); |
| 15237 int length = sizeof(descriptor) + previous_length; | 15237 int length = sizeof(descriptor) + previous_length; |
| 15238 Handle<ByteArray> serialized_descriptor = | 15238 Handle<ByteArray> serialized_descriptor = |
| 15239 isolate->factory()->NewByteArray(length); | 15239 isolate->factory()->NewByteArray(length); |
| 15240 Handle<DeclaredAccessorDescriptor> value = | 15240 Handle<DeclaredAccessorDescriptor> value = |
| 15241 isolate->factory()->NewDeclaredAccessorDescriptor(); | 15241 isolate->factory()->NewDeclaredAccessorDescriptor(); |
| 15242 value->set_serialized_data(*serialized_descriptor); | 15242 value->set_serialized_data(*serialized_descriptor); |
| 15243 // Copy in the data. | 15243 // Copy in the data. |
| 15244 { | 15244 { |
| 15245 AssertNoAllocation no_allocation; | 15245 DisallowHeapAllocation no_allocation; |
| 15246 uint8_t* array = serialized_descriptor->GetDataStartAddress(); | 15246 uint8_t* array = serialized_descriptor->GetDataStartAddress(); |
| 15247 if (previous_length != 0) { | 15247 if (previous_length != 0) { |
| 15248 uint8_t* previous_array = | 15248 uint8_t* previous_array = |
| 15249 previous->serialized_data()->GetDataStartAddress(); | 15249 previous->serialized_data()->GetDataStartAddress(); |
| 15250 OS::MemCopy(array, previous_array, previous_length); | 15250 OS::MemCopy(array, previous_array, previous_length); |
| 15251 array += previous_length; | 15251 array += previous_length; |
| 15252 } | 15252 } |
| 15253 ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0); | 15253 ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0); |
| 15254 DeclaredAccessorDescriptorData* data = | 15254 DeclaredAccessorDescriptorData* data = |
| 15255 reinterpret_cast<DeclaredAccessorDescriptorData*>(array); | 15255 reinterpret_cast<DeclaredAccessorDescriptorData*>(array); |
| (...skipping 381 matching lines...) |
| 15637 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); | 15637 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); |
| 15638 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); | 15638 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); |
| 15639 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); | 15639 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); |
| 15640 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); | 15640 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); |
| 15641 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); | 15641 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); |
| 15642 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); | 15642 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); |
| 15643 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); | 15643 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); |
| 15644 } | 15644 } |
| 15645 | 15645 |
| 15646 } } // namespace v8::internal | 15646 } } // namespace v8::internal |
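
The change throughout this file is mechanical: every `AssertNoAllocation` stack guard becomes `DisallowHeapAllocation`, `HEAP->IsAllocationAllowed()` becomes `AllowHeapAllocation::IsAllowed()`, and the `ALLOW_HANDLE_DEREF` macro becomes the `AllowDeferredHandleDereference` scope object. As a rough illustration of the pattern only (not the actual V8 implementation; the real classes are per-isolate and largely debug-mode-only, and the member names below are assumptions for the sketch), such guards are RAII objects that flip a flag for the duration of a scope so that assertions can check it:

```cpp
// Minimal sketch of an RAII "disallow heap allocation" guard.
// Illustrative only; names and storage are assumptions, not V8's code.
#include <cassert>

class AllowHeapAllocation {
 public:
  // Queried by assertions such as ASSERT(!AllowHeapAllocation::IsAllowed()).
  static bool IsAllowed() { return allowed_; }

 private:
  static bool allowed_;
  friend class DisallowHeapAllocation;
};

bool AllowHeapAllocation::allowed_ = true;

// Constructing one of these forbids allocation until it is destroyed;
// the destructor restores the previous state so guards nest correctly.
class DisallowHeapAllocation {
 public:
  DisallowHeapAllocation() : previous_(AllowHeapAllocation::allowed_) {
    AllowHeapAllocation::allowed_ = false;
  }
  ~DisallowHeapAllocation() { AllowHeapAllocation::allowed_ = previous_; }

 private:
  bool previous_;
};

void CopyWithoutGC() {
  DisallowHeapAllocation no_gc;              // allocation (and thus GC) forbidden here
  assert(!AllowHeapAllocation::IsAllowed());
}                                            // state restored when no_gc leaves scope
```

Passing the guard into calls such as `GetWriteBarrierMode(no_gc)`, as several call sites in the diff do, uses the guard object as compile-time evidence that no GC can run while the cheaper `SKIP_WRITE_BARRIER` mode is in effect.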