OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 278 matching lines...)
289 } | 289 } |
290 | 290 |
291 | 291 |
292 // ----------------------------------------------------------------------------- | 292 // ----------------------------------------------------------------------------- |
293 // NewSpace | 293 // NewSpace |
294 | 294 |
295 | 295 |
296 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { | 296 MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { |
297 Address old_top = allocation_info_.top; | 297 Address old_top = allocation_info_.top; |
298 #ifdef DEBUG | 298 #ifdef DEBUG |
| 299 // If we are stressing compaction, we waste some memory in new space |
| 300 // in order to get more frequent GCs. |
299 if (FLAG_stress_compaction && !HEAP->linear_allocation()) { | 301 if (FLAG_stress_compaction && !HEAP->linear_allocation()) { |
300 if (allocation_info_.limit - old_top >= size_in_bytes * 4) { | 302 if (allocation_info_.limit - old_top >= size_in_bytes * 4) { |
301 int filler_size = size_in_bytes * 4; | 303 int filler_size = size_in_bytes * 4; |
302 for (int i = 0; i < filler_size; i += kPointerSize) { | 304 for (int i = 0; i < filler_size; i += kPointerSize) { |
303 *(reinterpret_cast<Object**>(old_top + i)) = | 305 *(reinterpret_cast<Object**>(old_top + i)) = |
304 HEAP->one_pointer_filler_map(); | 306 HEAP->one_pointer_filler_map(); |
305 } | 307 } |
306 old_top += filler_size; | 308 old_top += filler_size; |
307 allocation_info_.top += filler_size; | 309 allocation_info_.top += filler_size; |
308 } | 310 } |
(...skipping 44 matching lines...)
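
The DEBUG block above tiles four times the requested size with one-pointer fillers before every allocation, so under --stress-compaction new space fills up much faster and GCs happen more often. Below is a minimal standalone sketch of that padding idea; BumpSpace, kFillerTag, and AllocateRawStressed are hypothetical names used only for illustration, not the V8 API:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Toy bump-pointer space. kFillerTag stands in for the one-pointer
// filler map that V8 writes into each wasted word.
struct BumpSpace {
  static constexpr uintptr_t kFillerTag = 0xF1F1F1F1u;
  static constexpr int kPointerSize = sizeof(uintptr_t);

  std::vector<uintptr_t> backing;
  size_t top = 0;  // index of the next free word

  explicit BumpSpace(size_t words) : backing(words, 0) {}
  size_t limit() const { return backing.size(); }

  // Allocate size_in_bytes; when stressing, first waste 4x that amount
  // as filler words, mirroring the DEBUG block in AllocateRaw above.
  // Returns the word index of the allocation, or -1 when the space is
  // exhausted (the point at which V8 would fall back to a GC).
  ptrdiff_t AllocateRawStressed(int size_in_bytes, bool stress) {
    size_t words = size_in_bytes / kPointerSize;
    if (stress && limit() - top >= words * 4) {
      // Tile 4x the request with fillers so the space fills sooner.
      for (size_t i = 0; i < words * 4; i++) backing[top + i] = kFillerTag;
      top += words * 4;  // advance past the fillers, like allocation_info_.top
    }
    // Normal bump allocation; failure here is what triggers the GC.
    if (limit() - top < words) return -1;
    size_t result = top;
    top += words;
    return static_cast<ptrdiff_t>(result);
  }
};

int main() {
  BumpSpace space(64);
  // A 4-word request normally costs 4 words; under stress it also burns
  // 16 filler words, so the space exhausts roughly 5x sooner.
  ptrdiff_t p = space.AllocateRawStressed(4 * BumpSpace::kPointerSize, true);
  assert(p >= 0);
  std::cout << "allocated at word " << p << ", top now at " << space.top << "\n";
}

Each request effectively costs about five times its size, so the allocation failure that starts a GC arrives about five times sooner; writing a filler map into every wasted word keeps those words looking like real (dead) objects, so heap iteration still works.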
353 Map* map = object->map(); | 355 Map* map = object->map(); |
354 Heap* heap = object->GetHeap(); | 356 Heap* heap = object->GetHeap(); |
355 return map == heap->raw_unchecked_free_space_map() | 357 return map == heap->raw_unchecked_free_space_map() |
356 || map == heap->raw_unchecked_one_pointer_filler_map() | 358 || map == heap->raw_unchecked_one_pointer_filler_map() |
357 || map == heap->raw_unchecked_two_pointer_filler_map(); | 359 || map == heap->raw_unchecked_two_pointer_filler_map(); |
358 } | 360 } |
359 | 361 |
360 } } // namespace v8::internal | 362 } } // namespace v8::internal |
361 | 363 |
362 #endif // V8_SPACES_INL_H_ | 364 #endif // V8_SPACES_INL_H_ |
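
The second hunk classifies an object by comparing its map pointer against the heap's singleton free-space and filler maps. The enclosing function's name falls outside the visible hunk, so the free-standing IsFreeListNode below is a hypothetical stand-in; this is a minimal sketch of the map-identity test, not the V8 API:

#include <cassert>

struct Map {};  // stand-in for v8::internal::Map

// Each heap owns singleton Map objects; pointer identity against them
// classifies a heap object, just as in the diff above.
struct Heap {
  Map free_space_map;
  Map one_pointer_filler_map;
  Map two_pointer_filler_map;
  Map fixed_array_map;  // an example of an ordinary, non-filler map
};

struct HeapObject {
  Map* map;
};

// Mirrors the check that closes the diff: an object is free-list
// storage iff its map is one of the three free-space/filler maps.
bool IsFreeListNode(const Heap& heap, const HeapObject& object) {
  return object.map == &heap.free_space_map ||
         object.map == &heap.one_pointer_filler_map ||
         object.map == &heap.two_pointer_filler_map;
}

int main() {
  Heap heap;
  HeapObject filler{&heap.one_pointer_filler_map};
  HeapObject array{&heap.fixed_array_map};
  assert(IsFreeListNode(heap, filler));
  assert(!IsFreeListNode(heap, array));
}

Using map identity rather than a per-object flag keeps the check to three pointer comparisons and needs no extra storage in the object itself.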