OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3617 matching lines...)
3628 int instance_size_delta = map_of_this->instance_size() - new_instance_size; | 3628 int instance_size_delta = map_of_this->instance_size() - new_instance_size; |
3629 ASSERT(instance_size_delta >= 0); | 3629 ASSERT(instance_size_delta >= 0); |
3630 current_heap->CreateFillerObjectAt(this->address() + new_instance_size, | 3630 current_heap->CreateFillerObjectAt(this->address() + new_instance_size, |
3631 instance_size_delta); | 3631 instance_size_delta); |
3632 if (Marking::IsBlack(Marking::MarkBitFrom(this))) { | 3632 if (Marking::IsBlack(Marking::MarkBitFrom(this))) { |
3633 MemoryChunk::IncrementLiveBytesFromMutator(this->address(), | 3633 MemoryChunk::IncrementLiveBytesFromMutator(this->address(), |
3634 -instance_size_delta); | 3634 -instance_size_delta); |
3635 } | 3635 } |
3636 | 3636 |
3637 set_map(new_map); | 3637 set_map(new_map); |
| 3638 map_of_this->NotifyLeafMapLayoutChange(); |
3638 | 3639 |
3639 set_properties(dictionary); | 3640 set_properties(dictionary); |
3640 | 3641 |
3641 current_heap->isolate()->counters()->props_to_dictionary()->Increment(); | 3642 current_heap->isolate()->counters()->props_to_dictionary()->Increment(); |
3642 | 3643 |
3643 #ifdef DEBUG | 3644 #ifdef DEBUG |
3644 if (FLAG_trace_normalization) { | 3645 if (FLAG_trace_normalization) { |
3645 PrintF("Object properties have been normalized:\n"); | 3646 PrintF("Object properties have been normalized:\n"); |
3646 Print(); | 3647 Print(); |
3647 } | 3648 } |
(...skipping 1648 matching lines...)
5296 MaybeObject* maybe_result = RawCopy(instance_size()); | 5297 MaybeObject* maybe_result = RawCopy(instance_size()); |
5297 if (!maybe_result->To(&result)) return maybe_result; | 5298 if (!maybe_result->To(&result)) return maybe_result; |
5298 | 5299 |
5299 // Please note instance_type and instance_size are set when allocated. | 5300 // Please note instance_type and instance_size are set when allocated. |
5300 result->set_inobject_properties(inobject_properties()); | 5301 result->set_inobject_properties(inobject_properties()); |
5301 result->set_unused_property_fields(unused_property_fields()); | 5302 result->set_unused_property_fields(unused_property_fields()); |
5302 | 5303 |
5303 result->set_pre_allocated_property_fields(pre_allocated_property_fields()); | 5304 result->set_pre_allocated_property_fields(pre_allocated_property_fields()); |
5304 result->set_is_shared(false); | 5305 result->set_is_shared(false); |
5305 result->ClearCodeCache(GetHeap()); | 5306 result->ClearCodeCache(GetHeap()); |
| 5307 NotifyLeafMapLayoutChange(); |
5306 return result; | 5308 return result; |
5307 } | 5309 } |
5308 | 5310 |
5309 | 5311 |
5310 MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors, | 5312 MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors, |
5311 Descriptor* descriptor) { | 5313 Descriptor* descriptor) { |
5312 // Sanity check. This path is only to be taken if the map owns its descriptor | 5314 // Sanity check. This path is only to be taken if the map owns its descriptor |
5313 // array, implying that its NumberOfOwnDescriptors equals the number of | 5315 // array, implying that its NumberOfOwnDescriptors equals the number of |
5314 // descriptors in the descriptor array. | 5316 // descriptors in the descriptor array. |
5315 ASSERT(NumberOfOwnDescriptors() == | 5317 ASSERT(NumberOfOwnDescriptors() == |
(...skipping 4189 matching lines...)
9505 | 9507 |
9506 | 9508 |
9507 void Map::ZapPrototypeTransitions() { | 9509 void Map::ZapPrototypeTransitions() { |
9508 FixedArray* proto_transitions = GetPrototypeTransitions(); | 9510 FixedArray* proto_transitions = GetPrototypeTransitions(); |
9509 MemsetPointer(proto_transitions->data_start(), | 9511 MemsetPointer(proto_transitions->data_start(), |
9510 GetHeap()->the_hole_value(), | 9512 GetHeap()->the_hole_value(), |
9511 proto_transitions->length()); | 9513 proto_transitions->length()); |
9512 } | 9514 } |
9513 | 9515 |
9514 | 9516 |
9515 Handle<DependentCodes> DependentCodes::Append(Handle<DependentCodes> codes, | 9517 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) { |
9516 Handle<Code> value) { | 9518 Recompute(entries); |
9517 int append_index = codes->number_of_codes(); | 9519 } |
9518 if (append_index > 0 && codes->code_at(append_index - 1) == *value) { | 9520 |
| 9521 |
| 9522 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) { |
| 9523 start_indexes_[0] = 0; |
| 9524 for (int g = 1; g <= kGroupCount; g++) { |
| 9525 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1)); |
| 9526 start_indexes_[g] = start_indexes_[g - 1] + count; |
| 9527 } |
| 9528 } |
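The Recompute loop above is a prefix sum over the per-group entry counts stored in the array header: start_indexes_[g] is the slot where group g's codes begin, and start_indexes_[kGroupCount] is the total number of code entries. A minimal standalone sketch of that invariant (not V8 code; the group count and per-group counts are made up for illustration):

  #include <cassert>

  const int kGroupCount = 2;               // illustrative group count
  int group_counts[kGroupCount] = {3, 1};  // e.g. 3 codes in group 0, 1 in group 1

  // Mirrors GroupStartIndexes::Recompute: prefix sums over the counts.
  void Recompute(int start_indexes[kGroupCount + 1]) {
    start_indexes[0] = 0;
    for (int g = 1; g <= kGroupCount; g++) {
      start_indexes[g] = start_indexes[g - 1] + group_counts[g - 1];
    }
  }

  int main() {
    int starts[kGroupCount + 1];
    Recompute(starts);
    // Group 0 occupies code slots [0, 3), group 1 occupies [3, 4);
    // starts[kGroupCount] is the total number of entries.
    assert(starts[0] == 0 && starts[1] == 3 && starts[2] == 4);
    return 0;
  }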
| 9529 |
| 9530 |
| 9531 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries, |
| 9532 DependencyGroup group, |
| 9533 Handle<Code> value) { |
| 9534 GroupStartIndexes starts(*entries); |
| 9535 int start = starts.at(group); |
| 9536 int end = starts.at(group + 1); |
| 9537 int number_of_entries = starts.number_of_entries(); |
| 9538 if (start < end && entries->code_at(end - 1) == *value) { |
9519 // Do not append the code if it is already in the array. | 9539 // Do not append the code if it is already in the array. |
9520 // It is sufficient to check only the last element because | 9540 // It is sufficient to check only the last element because |
9521 // we process embedded maps of an optimized code in one batch. | 9541 // we process embedded maps of an optimized code in one batch. |
9522 return codes; | 9542 return entries; |
9523 } | 9543 } |
9524 if (codes->length() < kCodesIndex + append_index + 1) { | 9544 if (entries->length() < kCodesStartIndex + number_of_entries + 1) { |
9525 Factory* factory = codes->GetIsolate()->factory(); | 9545 Factory* factory = entries->GetIsolate()->factory(); |
9526 int capacity = kCodesIndex + append_index + 1; | 9546 int capacity = kCodesStartIndex + number_of_entries + 1; |
9527 if (capacity > 5) capacity = capacity * 5 / 4; | 9547 if (capacity > 5) capacity = capacity * 5 / 4; |
9528 Handle<DependentCodes> new_codes = Handle<DependentCodes>::cast( | 9548 Handle<DependentCode> new_entries = Handle<DependentCode>::cast( |
9529 factory->CopySizeFixedArray(codes, capacity)); | 9549 factory->CopySizeFixedArray(entries, capacity)); |
9530 // The number of codes can change after GC. | 9550 // The number of codes can change after GC. |
9531 append_index = codes->number_of_codes(); | 9551 starts.Recompute(*entries); |
9532 for (int i = 0; i < append_index; i++) { | 9552 start = starts.at(group); |
9533 codes->clear_code_at(i); | 9553 end = starts.at(group + 1); |
| 9554 number_of_entries = starts.number_of_entries(); |
| 9555 for (int i = 0; i < number_of_entries; i++) { |
| 9556 entries->clear_code_at(i); |
9534 } | 9557 } |
9535 codes = new_codes; | 9558 // If the old fixed array was empty, we need to reset counters of the |
| 9559 // new array. |
| 9560 if (number_of_entries == 0) { |
| 9561 for (int g = 0; g < kGroupCount; g++) { |
| 9562 new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0); |
| 9563 } |
| 9564 } |
| 9565 entries = new_entries; |
9536 } | 9566 } |
9537 codes->set_code_at(append_index, *value); | 9567 entries->ExtendGroup(group); |
9538 codes->set_number_of_codes(append_index + 1); | 9568 entries->set_code_at(end, *value); |
9539 return codes; | 9569 entries->set_number_of_entries(group, end + 1 - start); |
| 9570 return entries; |
9540 } | 9571 } |
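When the backing fixed array is full, Insert grows it: tiny arrays grow by exactly one slot, while anything needing more than five slots gets roughly 25% extra headroom (capacity * 5 / 4). A quick standalone illustration of that policy (not V8 code):

  #include <cassert>

  // Same arithmetic as the capacity computation in Insert above.
  int NewCapacity(int required) {
    int capacity = required;
    if (capacity > 5) capacity = capacity * 5 / 4;
    return capacity;
  }

  int main() {
    assert(NewCapacity(4) == 4);   // small arrays grow one slot at a time
    assert(NewCapacity(8) == 10);  // larger arrays get ~25% slack
    return 0;
  }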
9541 | 9572 |
9542 | 9573 |
9543 bool DependentCodes::Contains(Code* code) { | 9574 bool DependentCode::Contains(DependencyGroup group, Code* code) { |
9544 int limit = number_of_codes(); | 9575 GroupStartIndexes starts(this); |
9545 for (int i = 0; i < limit; i++) { | 9576 int number_of_entries = starts.at(kGroupCount); |
| 9577 for (int i = 0; i < number_of_entries; i++) { |
9546 if (code_at(i) == code) return true; | 9578 if (code_at(i) == code) return true; |
9547 } | 9579 } |
9548 return false; | 9580 return false; |
9549 } | 9581 } |
9550 | 9582 |
9551 | 9583 |
| 9584 class DeoptimizeDependentCodeFilter : public OptimizedFunctionFilter { |
| 9585 public: |
| 9586 virtual bool TakeFunction(JSFunction* function) { |
| 9587 return function->code()->marked_for_deoptimization(); |
| 9588 } |
| 9589 }; |
| 9590 |
| 9591 |
| 9592 void DependentCode::DeoptimizeDependentCodeGroup( |
| 9593 DependentCode::DependencyGroup group) { |
| 9594 AssertNoAllocation no_allocation_scope; |
| 9595 DependentCode::GroupStartIndexes starts(this); |
| 9596 int start = starts.at(group); |
| 9597 int end = starts.at(group + 1); |
| 9598 int number_of_entries = starts.at(DependentCode::kGroupCount); |
| 9599 if (start == end) return; |
| 9600 for (int i = start; i < end; i++) { |
| 9601 Code* code = code_at(i); |
| 9602 code->set_marked_for_deoptimization(true); |
| 9603 } |
| 9604 for (int src = end, dst = start; src < number_of_entries; src++, dst++) { |
| 9605 set_code_at(dst, code_at(src)); |
| 9606 } |
| 9607 set_number_of_entries(group, 0); |
| 9608 DeoptimizeDependentCodeFilter filter; |
| 9609 Deoptimizer::DeoptimizeAllFunctionsWith(&filter); |
| 9610 } |
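After marking the group's codes for deoptimization, the loop over [end, number_of_entries) slides the entries of all later groups down over the vacated slots, so the array stays densely packed by group before the group's count is reset to zero. A minimal sketch of that compaction with made-up values (not V8 code):

  #include <cassert>

  int main() {
    // Hypothetical layout: group 0 = {10, 11}, group 1 = {20, 21, 22}.
    int codes[5] = {10, 11, 20, 21, 22};
    int start = 0, end = 2, number_of_entries = 5;  // deoptimizing group 0
    for (int src = end, dst = start; src < number_of_entries; src++, dst++) {
      codes[dst] = codes[src];  // slide later groups down
    }
    // Group 0 is now empty and group 1 starts at slot 0.
    assert(codes[0] == 20 && codes[1] == 21 && codes[2] == 22);
    return 0;
  }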
| 9611 |
| 9612 |
9552 MaybeObject* JSReceiver::SetPrototype(Object* value, | 9613 MaybeObject* JSReceiver::SetPrototype(Object* value, |
9553 bool skip_hidden_prototypes) { | 9614 bool skip_hidden_prototypes) { |
9554 #ifdef DEBUG | 9615 #ifdef DEBUG |
9555 int size = Size(); | 9616 int size = Size(); |
9556 #endif | 9617 #endif |
9557 | 9618 |
9558 Heap* heap = GetHeap(); | 9619 Heap* heap = GetHeap(); |
9559 // Silently ignore the change if value is not a JSObject or null. | 9620 // Silently ignore the change if value is not a JSObject or null. |
9560 // SpiderMonkey behaves this way. | 9621 // SpiderMonkey behaves this way. |
9561 if (!value->IsJSReceiver() && !value->IsNull()) return value; | 9622 if (!value->IsJSReceiver() && !value->IsNull()) return value; |
(...skipping 4361 matching lines...)
13923 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); | 13984 set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER); |
13924 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); | 13985 set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER); |
13925 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); | 13986 set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER); |
13926 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); | 13987 set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER); |
13927 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); | 13988 set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER); |
13928 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); | 13989 set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER); |
13929 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); | 13990 set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER); |
13930 } | 13991 } |
13931 | 13992 |
13932 } } // namespace v8::internal | 13993 } } // namespace v8::internal |