OLD | NEW |
1 // Copyright (c) 2008, Google Inc. | 1 // Copyright (c) 2008, Google Inc. |
2 // All rights reserved. | 2 // All rights reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
6 // met: | 6 // met: |
7 // | 7 // |
8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
81 | 81 |
82 // Total byte size in cache | 82 // Total byte size in cache |
83 size_t Size() const { return size_; } | 83 size_t Size() const { return size_; } |
84 | 84 |
85 // Allocate an object of the given size and class. The size given | 85 // Allocate an object of the given size and class. The size given |
86 // must be the same as the size of the class in the size map. | 86 // must be the same as the size of the class in the size map. |
87 void* Allocate(size_t size, size_t cl); | 87 void* Allocate(size_t size, size_t cl); |
88 void Deallocate(void* ptr, size_t size_class); | 88 void Deallocate(void* ptr, size_t size_class); |
89 | 89 |
90 void Scavenge(); | 90 void Scavenge(); |
91 void Print(TCMalloc_Printer* out) const; | |
92 | 91 |
93 int GetSamplePeriod(); | 92 int GetSamplePeriod(); |
94 | 93 |
95 // Record allocation of "k" bytes. Return true iff allocation | 94 // Record allocation of "k" bytes. Return true iff allocation |
96 // should be sampled | 95 // should be sampled |
97 bool SampleAllocation(size_t k); | 96 bool SampleAllocation(size_t k); |
98 | 97 |
99 // Record additional bytes allocated. | 98 // Record additional bytes allocated. |
100 void AddToByteAllocatedTotal(size_t k) { total_bytes_allocated_ += k; } | 99 void AddToByteAllocatedTotal(size_t k) { total_bytes_allocated_ += k; } |
101 | 100 |
(...skipping 16 matching lines...) Expand all Loading... |
118 // Return the number of thread heaps in use. | 117 // Return the number of thread heaps in use. |
119 static inline int HeapsInUse(); | 118 static inline int HeapsInUse(); |
120 | 119 |
121 // Writes to total_bytes the total number of bytes used by all thread heaps. | 120 // Writes to total_bytes the total number of bytes used by all thread heaps. |
122 // class_count must be an array of size kNumClasses. Writes the number of | 121 // class_count must be an array of size kNumClasses. Writes the number of |
123 // items on the corresponding freelist. class_count may be NULL. | 122 // items on the corresponding freelist. class_count may be NULL. |
124 // The storage of both parameters must be zero initialized. | 123 // The storage of both parameters must be zero initialized. |
125 // REQUIRES: Static::pageheap_lock is held. | 124 // REQUIRES: Static::pageheap_lock is held. |
126 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); | 125 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); |
127 | 126 |
128 // Write debugging statistics to 'out'. | |
129 // REQUIRES: Static::pageheap_lock is held. | |
130 static void PrintThreads(TCMalloc_Printer* out); | |
131 | |
132 // Sets the total thread cache size to new_size, recomputing the | 127 // Sets the total thread cache size to new_size, recomputing the |
133 // individual thread cache sizes as necessary. | 128 // individual thread cache sizes as necessary. |
134 // REQUIRES: Static::pageheap_lock is held. | 129 // REQUIRES: Static::pageheap_lock is held. |
135 static void set_overall_thread_cache_size(size_t new_size); | 130 static void set_overall_thread_cache_size(size_t new_size); |
136 static size_t overall_thread_cache_size() { | 131 static size_t overall_thread_cache_size() { |
137 return overall_thread_cache_size_; | 132 return overall_thread_cache_size_; |
138 } | 133 } |
139 | 134 |
140 private: | 135 private: |
141 class FreeList { | 136 class FreeList { |
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
207 length_++; | 202 length_++; |
208 } | 203 } |
209 | 204 |
210 void* Pop() { | 205 void* Pop() { |
211 ASSERT(list_ != NULL); | 206 ASSERT(list_ != NULL); |
212 length_--; | 207 length_--; |
213 if (length_ < lowater_) lowater_ = length_; | 208 if (length_ < lowater_) lowater_ = length_; |
214 return FL_Pop(&list_); | 209 return FL_Pop(&list_); |
215 } | 210 } |
216 | 211 |
| 212 void* Next() { |
| 213 if (list_ == NULL) return NULL; |
| 214 return FL_Next(list_); |
| 215 } |
| 216 |
217 void PushRange(int N, void *start, void *end) { | 217 void PushRange(int N, void *start, void *end) { |
218 FL_PushRange(&list_, start, end); | 218 FL_PushRange(&list_, start, end); |
219 length_ += N; | 219 length_ += N; |
220 } | 220 } |
221 | 221 |
222 void PopRange(int N, void **start, void **end) { | 222 void PopRange(int N, void **start, void **end) { |
223 FL_PopRange(&list_, N, start, end); | 223 FL_PopRange(&list_, N, start, end); |
224 ASSERT(length_ >= N); | 224 ASSERT(length_ >= N); |
225 length_ -= N; | 225 length_ -= N; |
226 if (length_ < lowater_) lowater_ = length_; | 226 if (length_ < lowater_) lowater_ = length_; |
(...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
359 return FetchFromCentralCache(cl, size); | 359 return FetchFromCentralCache(cl, size); |
360 } | 360 } |
361 size_ -= size; | 361 size_ -= size; |
362 return list->Pop(); | 362 return list->Pop(); |
363 } | 363 } |
364 | 364 |
365 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { | 365 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { |
366 FreeList* list = &list_[cl]; | 366 FreeList* list = &list_[cl]; |
367 size_ += Static::sizemap()->ByteSizeForClass(cl); | 367 size_ += Static::sizemap()->ByteSizeForClass(cl); |
368 ssize_t size_headroom = max_size_ - size_ - 1; | 368 ssize_t size_headroom = max_size_ - size_ - 1; |
| 369 |
| 370 // This catches back-to-back frees of allocs in the same size |
| 371 // class. A more comprehensive (and expensive) test would be to walk |
| 372 // the entire freelist. But this might be enough to find some bugs. |
| 373 ASSERT(ptr != list->Next()); |
| 374 |
369 list->Push(ptr); | 375 list->Push(ptr); |
370 ssize_t list_headroom = | 376 ssize_t list_headroom = |
371 static_cast<ssize_t>(list->max_length()) - list->length(); | 377 static_cast<ssize_t>(list->max_length()) - list->length(); |
372 | 378 |
373 // There are two relatively uncommon things that require further work. | 379 // There are two relatively uncommon things that require further work. |
374 // In the common case we're done, and in that case we need a single branch | 380 // In the common case we're done, and in that case we need a single branch |
375 // because of the bitwise-or trick that follows. | 381 // because of the bitwise-or trick that follows. |
376 if ((list_headroom | size_headroom) < 0) { | 382 if ((list_headroom | size_headroom) < 0) { |
377 if (list_headroom < 0) { | 383 if (list_headroom < 0) { |
378 ListTooLong(list, cl); | 384 ListTooLong(list, cl); |
(...skipping 27 matching lines...) Expand all Loading... |
406 // because we may be in the thread destruction code and may have | 412 // because we may be in the thread destruction code and may have |
407 // already cleaned up the cache for this thread. | 413 // already cleaned up the cache for this thread. |
408 inline ThreadCache* ThreadCache::GetCacheIfPresent() { | 414 inline ThreadCache* ThreadCache::GetCacheIfPresent() { |
409 if (!tsd_inited_) return NULL; | 415 if (!tsd_inited_) return NULL; |
410 return GetThreadHeap(); | 416 return GetThreadHeap(); |
411 } | 417 } |
412 | 418 |
413 } // namespace tcmalloc | 419 } // namespace tcmalloc |
414 | 420 |
415 #endif // TCMALLOC_THREAD_CACHE_H_ | 421 #endif // TCMALLOC_THREAD_CACHE_H_ |
OLD | NEW |