Chromium Code Reviews

Unified diff: src/heap/spaces.h

Issue 2440683002: [heap] Move typed slot filtering logic into sweeper. (Closed)
Patch Set: fix test | Created 4 years, 2 months ago
@@ -1,10 +1,10 @@
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #ifndef V8_HEAP_SPACES_H_
 #define V8_HEAP_SPACES_H_

 #include <list>
 #include <memory>
 #include <unordered_set>
(...skipping 325 matching lines...)
@@ -336,23 +336,21 @@
       + kPointerSize  // TypedSlotSet* typed_old_to_old_slots_
       + kPointerSize  // SkipList* skip_list_
       + kPointerSize  // AtomicValue high_water_mark_
       + kPointerSize  // base::Mutex* mutex_
       + kPointerSize  // base::AtomicWord concurrent_sweeping_
       + 2 * kSizetSize  // AtomicNumber free-list statistics
       + kPointerSize  // AtomicValue next_chunk_
       + kPointerSize  // AtomicValue prev_chunk_
       // FreeListCategory categories_[kNumberOfCategories]
       + FreeListCategory::kSize * kNumberOfCategories +
-      kPointerSize  // LocalArrayBufferTracker* local_tracker_
-      // std::unordered_set<Address>* black_area_end_marker_map_
-      + kPointerSize;
+      kPointerSize;  // LocalArrayBufferTracker* local_tracker_

   // We add some more space to the computed header size to amount for missing
   // alignment requirements in our computation.
   // Try to get kHeaderSize properly aligned on 32-bit and 64-bit machines.
   static const size_t kHeaderSize = kMinHeaderSize;

   static const int kBodyOffset =
       CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize);

   // The start offset of the object area in a page. Aligned to both maps and
(...skipping 210 matching lines...)
@@ -569,47 +567,20 @@
     owner_ = reinterpret_cast<Address>(space) + kPageHeaderTag;
     DCHECK((reinterpret_cast<intptr_t>(owner_) & kPageHeaderTagMask) ==
            kPageHeaderTag);
   }

   bool HasPageHeader() { return owner() != nullptr; }

   void InsertAfter(MemoryChunk* other);
   void Unlink();

-  void ReleaseBlackAreaEndMarkerMap() {
-    if (black_area_end_marker_map_) {
-      delete black_area_end_marker_map_;
-      black_area_end_marker_map_ = nullptr;
-    }
-  }
-
-  bool IsBlackAreaEndMarker(Address address) {
-    if (black_area_end_marker_map_) {
-      return black_area_end_marker_map_->find(address) !=
-             black_area_end_marker_map_->end();
-    }
-    return false;
-  }
-
-  void AddBlackAreaEndMarker(Address address) {
-    if (!black_area_end_marker_map_) {
-      black_area_end_marker_map_ = new std::unordered_set<Address>();
-    }
-    auto ret = black_area_end_marker_map_->insert(address);
-    USE(ret);
-    // Check that we inserted a new black area end marker.
-    DCHECK(ret.second);
-  }
-
-  bool HasBlackAreas() { return black_area_end_marker_map_ != nullptr; }
-
  protected:
   static MemoryChunk* Initialize(Heap* heap, Address base, size_t size,
                                  Address area_start, Address area_end,
                                  Executability executable, Space* owner,
                                  base::VirtualMemory* reservation);

   // Should be called when memory chunk is about to be freed.
   void ReleaseAllocatedMemory();

   base::VirtualMemory* reserved_memory() { return &reservation_; }
(...skipping 46 matching lines...)
@@ -662,23 +633,20 @@

   // next_chunk_ holds a pointer of type MemoryChunk
   base::AtomicValue<MemoryChunk*> next_chunk_;
   // prev_chunk_ holds a pointer of type MemoryChunk
   base::AtomicValue<MemoryChunk*> prev_chunk_;

   FreeListCategory categories_[kNumberOfCategories];

   LocalArrayBufferTracker* local_tracker_;

-  // Stores the end addresses of black areas.
-  std::unordered_set<Address>* black_area_end_marker_map_;
-
  private:
   void InitializeReservedMemory() { reservation_.Reset(); }

   friend class MemoryAllocator;
   friend class MemoryChunkValidator;
 };

 DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags)

 static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
(...skipping 2248 matching lines...)
@@ -2933,10 +2901,10 @@
   PageIterator old_iterator_;
   PageIterator code_iterator_;
   PageIterator map_iterator_;
   LargePageIterator lo_iterator_;
 };

 }  // namespace internal
 }  // namespace v8

 #endif  // V8_HEAP_SPACES_H_
