Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1838 matching lines...) | |
| 1849 } else if (type == FIXED_ARRAY_TYPE) { | 1849 } else if (type == FIXED_ARRAY_TYPE) { |
| 1850 ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); | 1850 ASSERT(sub_type <= LAST_FIXED_ARRAY_SUB_TYPE); |
| 1851 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++; | 1851 object_counts_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type]++; |
| 1852 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size; | 1852 object_sizes_[FIRST_FIXED_ARRAY_SUB_TYPE + sub_type] += size; |
| 1853 } | 1853 } |
| 1854 } | 1854 } |
| 1855 } | 1855 } |
| 1856 | 1856 |
| 1857 void CheckpointObjectStats(); | 1857 void CheckpointObjectStats(); |
| 1858 | 1858 |
| 1859 // We don't use a ScopedLock here since we want to lock the heap | |
| 1860 // only when FLAG_parallel_recompilation is true. | |
| 1861 class RelocationLock { | |
| 1862 public: | |
| 1863 explicit RelocationLock(Heap* heap); | |
| 1864 | |
| 1865 ~RelocationLock() { | |
| 1866 if (FLAG_parallel_recompilation) { | |
| 1867 #ifdef DEBUG | |
| 1868 heap_->relocation_mutex_locked_by_optimizer_thread_ = false; | |
> Hannes Payer (out of office), 2013/04/26 07:27:47: Just set it to false if the destructor is called b…
| 1869 #endif // DEBUG | |
| 1870 heap_->relocation_mutex_->Unlock(); | |
| 1871 } | |
| 1872 } | |
| 1873 | |
| 1874 #ifdef DEBUG | |
| 1875 static bool IsLockedByOptimizerThread(Heap* heap) { | |
> Hannes Payer (out of office), 2013/04/26 07:27:47: I think you may want to avoid static methods since…
| 1876 return heap->relocation_mutex_locked_by_optimizer_thread_; | |
| 1877 } | |
| 1878 #endif // DEBUG | |
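The reviewer's comment above is cut off in the review. If the suggestion is to drop the static helper, a non-static variant could read the same debug flag through the `Heap*` the lock already stores. This is purely an illustrative sketch, not part of the patch:

```cpp
#ifdef DEBUG
  // Hypothetical non-static form of the helper above: uses the heap_ member
  // held by RelocationLock instead of taking a Heap* parameter.
  bool IsLockedByOptimizerThread() const {
    return heap_->relocation_mutex_locked_by_optimizer_thread_;
  }
#endif  // DEBUG
```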
| 1879 | |
| 1880 private: | |
| 1881 Heap* heap_; | |
| 1882 }; | |
| 1883 | |
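Only the constructor declaration appears in this hunk; its body is defined elsewhere (presumably heap.cc) and is not shown. A minimal sketch of what the constructor and a typical use might look like, mirroring the destructor above; the body and the usage scope are assumptions, not taken from the patch:

```cpp
// Sketch of the constructor counterpart to the destructor shown above:
// the mutex is taken only when parallel recompilation is enabled, matching
// the conditional unlock in ~RelocationLock().
Heap::RelocationLock::RelocationLock(Heap* heap) : heap_(heap) {
  if (FLAG_parallel_recompilation) {
    heap_->relocation_mutex_->Lock();
  }
}

// Typical usage: hold the lock for the duration of a scope that must not
// race with object relocation triggered from the optimizer thread.
{
  Heap::RelocationLock relocation_lock(heap);
  // ... work that relies on objects not moving concurrently ...
}
```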
| 1859 private: | 1884 private: |
| 1860 Heap(); | 1885 Heap(); |
| 1861 | 1886 |
| 1862 // This can be calculated directly from a pointer to the heap; however, it is | 1887 // This can be calculated directly from a pointer to the heap; however, it is |
| 1863 // more expedient to get at the isolate directly from within Heap methods. | 1888 // more expedient to get at the isolate directly from within Heap methods. |
| 1864 Isolate* isolate_; | 1889 Isolate* isolate_; |
| 1865 | 1890 |
| 1866 Object* roots_[kRootListLength]; | 1891 Object* roots_[kRootListLength]; |
| 1867 | 1892 |
| 1868 intptr_t code_range_size_; | 1893 intptr_t code_range_size_; |
| (...skipping 454 matching lines...) | |
| 2323 bool configured_; | 2348 bool configured_; |
| 2324 | 2349 |
| 2325 ExternalStringTable external_string_table_; | 2350 ExternalStringTable external_string_table_; |
| 2326 | 2351 |
| 2327 ErrorObjectList error_object_list_; | 2352 ErrorObjectList error_object_list_; |
| 2328 | 2353 |
| 2329 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_; | 2354 VisitorDispatchTable<ScavengingCallback> scavenging_visitors_table_; |
| 2330 | 2355 |
| 2331 MemoryChunk* chunks_queued_for_free_; | 2356 MemoryChunk* chunks_queued_for_free_; |
| 2332 | 2357 |
| 2358 Mutex* relocation_mutex_; | |
| 2359 #ifdef DEBUG | |
| 2360 bool relocation_mutex_locked_by_optimizer_thread_; | |
| 2361 #endif // DEBUG; | |
| 2362 | |
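The new members above would need to be created and reset when the heap is set up. This hunk does not show that code; a plausible sketch, assuming the `OS::CreateMutex()` factory that V8 of this era uses for `Mutex*` members:

```cpp
// Plausible initialization, e.g. in Heap::SetUp() (assumed; not in this diff).
relocation_mutex_ = OS::CreateMutex();
#ifdef DEBUG
relocation_mutex_locked_by_optimizer_thread_ = false;
#endif  // DEBUG

// And the matching cleanup, e.g. in Heap::TearDown() (also assumed).
delete relocation_mutex_;
```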
| 2333 friend class Factory; | 2363 friend class Factory; |
| 2334 friend class GCTracer; | 2364 friend class GCTracer; |
| 2335 friend class DisallowAllocationFailure; | 2365 friend class DisallowAllocationFailure; |
| 2336 friend class AlwaysAllocateScope; | 2366 friend class AlwaysAllocateScope; |
| 2337 friend class Page; | 2367 friend class Page; |
| 2338 friend class Isolate; | 2368 friend class Isolate; |
| 2339 friend class MarkCompactCollector; | 2369 friend class MarkCompactCollector; |
| 2340 friend class MarkCompactMarkingVisitor; | 2370 friend class MarkCompactMarkingVisitor; |
| 2341 friend class MapCompact; | 2371 friend class MapCompact; |
| 2342 #ifdef VERIFY_HEAP | 2372 #ifdef VERIFY_HEAP |
| (...skipping 685 matching lines...) | |
| 3028 AssertNoAllocation no_alloc; // i.e. no gc allowed. | 3058 AssertNoAllocation no_alloc; // i.e. no gc allowed. |
| 3029 | 3059 |
| 3030 private: | 3060 private: |
| 3031 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 3061 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
| 3032 }; | 3062 }; |
| 3033 #endif // DEBUG | 3063 #endif // DEBUG |
| 3034 | 3064 |
| 3035 } } // namespace v8::internal | 3065 } } // namespace v8::internal |
| 3036 | 3066 |
| 3037 #endif // V8_HEAP_H_ | 3067 #endif // V8_HEAP_H_ |