| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 193 matching lines...) |
| 204 | 204 | 
| 205 | 205 | 
| 206 MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { | 206 MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { | 
| 207   return CopyFixedDoubleArrayWithMap(src, src->map()); | 207   return CopyFixedDoubleArrayWithMap(src, src->map()); | 
| 208 } | 208 } | 
| 209 | 209 | 
| 210 | 210 | 
| 211 MaybeObject* Heap::AllocateRaw(int size_in_bytes, | 211 MaybeObject* Heap::AllocateRaw(int size_in_bytes, | 
| 212                                AllocationSpace space, | 212                                AllocationSpace space, | 
| 213                                AllocationSpace retry_space) { | 213                                AllocationSpace retry_space) { | 
| 214   SLOW_ASSERT(!isolate_->optimizing_compiler_thread()->IsOptimizerThread()); | 214   ASSERT(AllowHandleAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | 
| 215   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); |  | 
| 216   ASSERT(space != NEW_SPACE || | 215   ASSERT(space != NEW_SPACE || | 
| 217          retry_space == OLD_POINTER_SPACE || | 216          retry_space == OLD_POINTER_SPACE || | 
| 218          retry_space == OLD_DATA_SPACE || | 217          retry_space == OLD_DATA_SPACE || | 
| 219          retry_space == LO_SPACE); | 218          retry_space == LO_SPACE); | 
| 220 #ifdef DEBUG | 219 #ifdef DEBUG | 
| 221   if (FLAG_gc_interval >= 0 && | 220   if (FLAG_gc_interval >= 0 && | 
| 222       !disallow_allocation_failure_ && | 221       !disallow_allocation_failure_ && | 
| 223       Heap::allocation_timeout_-- <= 0) { | 222       Heap::allocation_timeout_-- <= 0) { | 
| 224     return Failure::RetryAfterGC(space); | 223     return Failure::RetryAfterGC(space); | 
| 225   } | 224   } | 
| (...skipping 409 matching lines...) |
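The visible change in Heap::AllocateRaw swaps the heap-owned `allocation_allowed_` flag (and the explicit optimizer-thread SLOW_ASSERT) for a static `AllowHandleAllocation::IsAllowed()` query. A minimal sketch of that shape, assuming nothing beyond a plain thread-local flag and hypothetical names rather than V8's actual scope classes:

```cpp
// Sketch only: hypothetical per-thread allocation gate, not V8's actual API.
#include <cassert>
#include <cstddef>
#include <new>

namespace sketch {

// Stand-in for the per-thread permission that the new ASSERT queries via
// AllowHandleAllocation::IsAllowed(); here it is just a thread-local flag.
class AllocationGate {
 public:
  static bool IsAllowed() { return allowed_; }
  static void SetAllowed(bool allowed) { allowed_ = allowed; }

 private:
  static thread_local bool allowed_;
};

thread_local bool AllocationGate::allowed_ = true;

// Allocation entry point guarded the same way as Heap::AllocateRaw above:
// assert that allocation is permitted on the current thread before proceeding.
void* AllocateRaw(std::size_t size_in_bytes) {
  assert(AllocationGate::IsAllowed());
  return ::operator new(size_in_bytes);
}

}  // namespace sketch
```

Because the gate is per-thread, a background thread such as the optimizing compiler no longer needs its own carve-out in the assertion, which is what the TODO in the deleted code further down was working around.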
| 635 | 634 | 
| 636 | 635 | 
| 637 #define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL) \ | 636 #define CALL_HEAP_FUNCTION_PASS_EXCEPTION(ISOLATE, FUNCTION_CALL) \ | 
| 638   CALL_AND_RETRY(ISOLATE,                                         \ | 637   CALL_AND_RETRY(ISOLATE,                                         \ | 
| 639                  FUNCTION_CALL,                                   \ | 638                  FUNCTION_CALL,                                   \ | 
| 640                  return __object__,                               \ | 639                  return __object__,                               \ | 
| 641                  return __maybe_object__,                         \ | 640                  return __maybe_object__,                         \ | 
| 642                  return __maybe_object__) | 641                  return __maybe_object__) | 
| 643 | 642 | 
| 644 | 643 | 
| 645 #ifdef DEBUG |  | 
| 646 |  | 
| 647 inline bool Heap::allow_allocation(bool new_state) { |  | 
| 648   bool old = allocation_allowed_; |  | 
| 649   allocation_allowed_ = new_state; |  | 
| 650   return old; |  | 
| 651 } |  | 
| 652 |  | 
| 653 inline void Heap::set_allow_allocation(bool allocation_allowed) { |  | 
| 654   allocation_allowed_ = allocation_allowed; |  | 
| 655 } |  | 
| 656 |  | 
| 657 #endif |  | 
| 658 |  | 
| 659 |  | 
| 660 void ExternalStringTable::AddString(String* string) { | 644 void ExternalStringTable::AddString(String* string) { | 
| 661   ASSERT(string->IsExternalString()); | 645   ASSERT(string->IsExternalString()); | 
| 662   if (heap_->InNewSpace(string)) { | 646   if (heap_->InNewSpace(string)) { | 
| 663     new_space_strings_.Add(string); | 647     new_space_strings_.Add(string); | 
| 664   } else { | 648   } else { | 
| 665     old_space_strings_.Add(string); | 649     old_space_strings_.Add(string); | 
| 666   } | 650   } | 
| 667 } | 651 } | 
| 668 | 652 | 
| 669 | 653 | 
| (...skipping 190 matching lines...) |
| 860 } | 844 } | 
| 861 | 845 | 
| 862 | 846 | 
| 863 DisallowAllocationFailure::~DisallowAllocationFailure() { | 847 DisallowAllocationFailure::~DisallowAllocationFailure() { | 
| 864 #ifdef DEBUG | 848 #ifdef DEBUG | 
| 865   HEAP->disallow_allocation_failure_ = old_state_; | 849   HEAP->disallow_allocation_failure_ = old_state_; | 
| 866 #endif | 850 #endif | 
| 867 } | 851 } | 
| 868 | 852 | 
| 869 | 853 | 
| 870 #ifdef DEBUG |  | 
| 871 bool EnterAllocationScope(Isolate* isolate, bool allow_allocation) { |  | 
| 872   bool active = !isolate->optimizing_compiler_thread()->IsOptimizerThread(); |  | 
| 873   bool last_state = isolate->heap()->IsAllocationAllowed(); |  | 
| 874   if (active) { |  | 
| 875     // TODO(yangguo): Make HandleDereferenceGuard avoid isolate mutation in the |  | 
| 876     // same way if running on the optimizer thread. |  | 
| 877     isolate->heap()->set_allow_allocation(allow_allocation); |  | 
| 878   } |  | 
| 879   return last_state; |  | 
| 880 } |  | 
| 881 |  | 
| 882 |  | 
| 883 void ExitAllocationScope(Isolate* isolate, bool last_state) { |  | 
| 884   bool active = !isolate->optimizing_compiler_thread()->IsOptimizerThread(); |  | 
| 885   if (active) { |  | 
| 886     isolate->heap()->set_allow_allocation(last_state); |  | 
| 887   } |  | 
| 888 } |  | 
| 889 |  | 
| 890 |  | 
| 891 AssertNoAllocation::AssertNoAllocation() |  | 
| 892     : last_state_(EnterAllocationScope(ISOLATE, false)) { |  | 
| 893 } |  | 
| 894 |  | 
| 895 AssertNoAllocation::~AssertNoAllocation() { |  | 
| 896   ExitAllocationScope(ISOLATE, last_state_); |  | 
| 897 } |  | 
| 898 |  | 
| 899 DisableAssertNoAllocation::DisableAssertNoAllocation() |  | 
| 900   : last_state_(EnterAllocationScope(ISOLATE, true)) { |  | 
| 901 } |  | 
| 902 |  | 
| 903 DisableAssertNoAllocation::~DisableAssertNoAllocation() { |  | 
| 904   ExitAllocationScope(ISOLATE, last_state_); |  | 
| 905 } |  | 
| 906 #else |  | 
| 907 |  | 
| 908 AssertNoAllocation::AssertNoAllocation() { } |  | 
| 909 AssertNoAllocation::~AssertNoAllocation() { } |  | 
| 910 DisableAssertNoAllocation::DisableAssertNoAllocation() { } |  | 
| 911 DisableAssertNoAllocation::~DisableAssertNoAllocation() { } |  | 
| 912 |  | 
| 913 #endif |  | 
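The deleted block above implemented AssertNoAllocation and DisableAssertNoAllocation by hand: EnterAllocationScope saved the heap's current allocation state and ExitAllocationScope restored it, skipping the mutation entirely on the optimizer thread. The same save-and-restore discipline can be expressed as RAII scopes over a thread-local flag; the following is a standalone sketch with hypothetical names, not the replacement scope classes V8 actually ships:

```cpp
// Sketch only: RAII allocation scopes over a thread-local flag, mirroring what
// AssertNoAllocation/DisableAssertNoAllocation did by hand in the removed code.
#include <cassert>

namespace sketch {

thread_local bool allocation_allowed = true;

// Forbid allocation for the lifetime of the scope; the previous state is
// captured in the constructor and restored in the destructor, like the
// EnterAllocationScope/ExitAllocationScope pair did.
class NoAllocationScope {
 public:
  NoAllocationScope() : last_state_(allocation_allowed) {
    allocation_allowed = false;
  }
  ~NoAllocationScope() { allocation_allowed = last_state_; }

  NoAllocationScope(const NoAllocationScope&) = delete;
  NoAllocationScope& operator=(const NoAllocationScope&) = delete;

 private:
  bool last_state_;
};

// Temporarily re-enable allocation inside an outer NoAllocationScope,
// the counterpart of the removed DisableAssertNoAllocation.
class AllowAllocationScope {
 public:
  AllowAllocationScope() : last_state_(allocation_allowed) {
    allocation_allowed = true;
  }
  ~AllowAllocationScope() { allocation_allowed = last_state_; }

  AllowAllocationScope(const AllowAllocationScope&) = delete;
  AllowAllocationScope& operator=(const AllowAllocationScope&) = delete;

 private:
  bool last_state_;
};

// Example use: allocation is forbidden for the outer scope, but a nested
// AllowAllocationScope can opt back in where needed.
void Example() {
  NoAllocationScope no_allocation;
  assert(!allocation_allowed);
  {
    AllowAllocationScope allow;
    assert(allocation_allowed);
  }
  assert(!allocation_allowed);
}

}  // namespace sketch
```

A debug-mode allocator can then assert the flag at every allocation site, which is the direction the new ASSERT in AllocateRaw takes.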
| 914 |  | 
| 915 |  | 
| 916 } }  // namespace v8::internal | 854 } }  // namespace v8::internal | 
| 917 | 855 | 
| 918 #endif  // V8_HEAP_INL_H_ | 856 #endif  // V8_HEAP_INL_H_ | 