OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 5410 matching lines...)
5421 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 5421 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
5422 if (!maybe_result->ToObject(&result)) return maybe_result; | 5422 if (!maybe_result->ToObject(&result)) return maybe_result; |
5423 } | 5423 } |
5424 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( | 5424 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier( |
5425 hash_table_map()); | 5425 hash_table_map()); |
5426 ASSERT(result->IsHashTable()); | 5426 ASSERT(result->IsHashTable()); |
5427 return result; | 5427 return result; |
5428 } | 5428 } |
5429 | 5429 |
5430 | 5430 |
5431 MaybeObject* Heap::AllocateSymbol(PretenureFlag pretenure) { | 5431 MaybeObject* Heap::AllocateSymbol() { |
5432 // Statically ensure that it is safe to allocate symbols in paged spaces. | 5432 // Statically ensure that it is safe to allocate symbols in paged spaces. |
5433 STATIC_ASSERT(Symbol::kSize <= Page::kNonCodeObjectAreaSize); | 5433 STATIC_ASSERT(Symbol::kSize <= Page::kNonCodeObjectAreaSize); |
5434 AllocationSpace space = pretenure == TENURED ? OLD_POINTER_SPACE : NEW_SPACE; | |
5435 | 5434 |
5436 Object* result; | 5435 Object* result; |
5437 MaybeObject* maybe = AllocateRaw(Symbol::kSize, space, OLD_POINTER_SPACE); | 5436 MaybeObject* maybe = |
| 5437 AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); |
5438 if (!maybe->ToObject(&result)) return maybe; | 5438 if (!maybe->ToObject(&result)) return maybe; |
5439 | 5439 |
5440 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); | 5440 HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); |
5441 | 5441 |
5442 // Generate a random hash value. | 5442 // Generate a random hash value. |
5443 int hash; | 5443 int hash; |
5444 int attempts = 0; | 5444 int attempts = 0; |
5445 do { | 5445 do { |
5446 hash = V8::RandomPrivate(isolate()) & Name::kHashBitMask; | 5446 hash = V8::RandomPrivate(isolate()) & Name::kHashBitMask; |
5447 attempts++; | 5447 attempts++; |
(...skipping 2015 matching lines...)
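The hunk above drops the PretenureFlag parameter from Heap::AllocateSymbol and passes OLD_POINTER_SPACE as both the requested and retry space, so every symbol is now tenured regardless of the caller. A minimal caller-side sketch of the new signature, following the MaybeObject idiom used elsewhere in this file (the heap pointer and error-propagation style are illustrative assumptions, not part of this CL):

    // Sketch only: callers no longer pass TENURED/NOT_TENURED;
    // the symbol is always allocated in old pointer space.
    Object* symbol;
    { MaybeObject* maybe_symbol = heap->AllocateSymbol();
      if (!maybe_symbol->ToObject(&symbol)) return maybe_symbol;
    }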
7463 | 7463 |
7464 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { | 7464 void KeyedLookupCache::Update(Map* map, Name* name, int field_offset) { |
7465 if (!name->IsUniqueName()) { | 7465 if (!name->IsUniqueName()) { |
7466 String* internalized_string; | 7466 String* internalized_string; |
7467 if (!HEAP->InternalizeStringIfExists( | 7467 if (!HEAP->InternalizeStringIfExists( |
7468 String::cast(name), &internalized_string)) { | 7468 String::cast(name), &internalized_string)) { |
7469 return; | 7469 return; |
7470 } | 7470 } |
7471 name = internalized_string; | 7471 name = internalized_string; |
7472 } | 7472 } |
| 7473 // This cache is cleared only between mark compact passes, so we expect the |
| 7474 // cache to only contain old space names. |
| 7475 ASSERT(!HEAP->InNewSpace(name)); |
7473 | 7476 |
7474 int index = (Hash(map, name) & kHashMask); | 7477 int index = (Hash(map, name) & kHashMask); |
7475 // After a GC there will be free slots, so we use them in order (this may | 7478 // After a GC there will be free slots, so we use them in order (this may |
7476 // help to get the most frequently used one in position 0). | 7479 // help to get the most frequently used one in position 0). |
7477 for (int i = 0; i < kEntriesPerBucket; i++) { | 7480 for (int i = 0; i < kEntriesPerBucket; i++) {
7478 Key& key = keys_[index]; | 7481 Key& key = keys_[index]; |
7479 Object* free_entry_indicator = NULL; | 7482 Object* free_entry_indicator = NULL; |
7480 if (key.map == free_entry_indicator) { | 7483 if (key.map == free_entry_indicator) { |
7481 key.map = map; | 7484 key.map = map; |
7482 key.name = name; | 7485 key.name = name; |
(...skipping 334 matching lines...)
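The added comment and ASSERT in KeyedLookupCache::Update document an invariant rather than change behavior: the cache is cleared at each mark-compact pass, and a non-unique name is first replaced by its internalized string, so every cached name should already live outside new space. A rough usage sketch, assuming Lookup keeps its usual map/name signature in this revision (the caller-side names here are hypothetical):

    // Sketch: record a (map, name) -> field_offset mapping, then probe it.
    KeyedLookupCache* cache = isolate->keyed_lookup_cache();
    cache->Update(receiver_map, name, field_offset);
    int offset = cache->Lookup(receiver_map, name);  // kNotFound when absent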
7817 static_cast<int>(object_sizes_last_time_[index])); | 7820 static_cast<int>(object_sizes_last_time_[index])); |
7818 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7821 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
7819 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7822 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7820 | 7823 |
7821 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7824 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7822 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7825 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7823 ClearObjectStats(); | 7826 ClearObjectStats(); |
7824 } | 7827 } |
7825 | 7828 |
7826 } } // namespace v8::internal | 7829 } } // namespace v8::internal |