| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 136 matching lines...) |
| 147 | 147 |
| 148 | 148 |
| 149 static void CopyObjectToObjectElements(FixedArray* from, | 149 static void CopyObjectToObjectElements(FixedArray* from, |
| 150 ElementsKind from_kind, | 150 ElementsKind from_kind, |
| 151 uint32_t from_start, | 151 uint32_t from_start, |
| 152 FixedArray* to, | 152 FixedArray* to, |
| 153 ElementsKind to_kind, | 153 ElementsKind to_kind, |
| 154 uint32_t to_start, | 154 uint32_t to_start, |
| 155 int raw_copy_size) { | 155 int raw_copy_size) { |
| 156 ASSERT(to->map() != HEAP->fixed_cow_array_map()); | 156 ASSERT(to->map() != HEAP->fixed_cow_array_map()); |
| 157 AssertNoAllocation no_allocation; |
| 157 int copy_size = raw_copy_size; | 158 int copy_size = raw_copy_size; |
| 158 if (raw_copy_size < 0) { | 159 if (raw_copy_size < 0) { |
| 159 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 160 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 160 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 161 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 161 copy_size = Min(from->length() - from_start, | 162 copy_size = Min(from->length() - from_start, |
| 162 to->length() - to_start); | 163 to->length() - to_start); |
| 163 #ifdef DEBUG | |
| 164 // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already | |
| 165 // marked with the hole. | |
| 166 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 164 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 167 for (int i = to_start + copy_size; i < to->length(); ++i) { | 165 int start = to_start + copy_size; |
| 168 ASSERT(to->get(i)->IsTheHole()); | 166 int length = to->length() - start; |
| 167 if (length > 0) { |
| 168 Heap* heap = from->GetHeap(); |
| 169 MemsetPointer(to->data_start() + start, heap->the_hole_value(), length); |
| 169 } | 170 } |
| 170 } | 171 } |
| 171 #endif | |
| 172 } | 172 } |
| 173 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | 173 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && |
| 174 (copy_size + static_cast<int>(from_start)) <= from->length()); | 174 (copy_size + static_cast<int>(from_start)) <= from->length()); |
| 175 if (copy_size == 0) return; | 175 if (copy_size == 0) return; |
| 176 ASSERT(IsFastSmiOrObjectElementsKind(from_kind)); | 176 ASSERT(IsFastSmiOrObjectElementsKind(from_kind)); |
| 177 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 177 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
| 178 Address to_address = to->address() + FixedArray::kHeaderSize; | 178 Address to_address = to->address() + FixedArray::kHeaderSize; |
| 179 Address from_address = from->address() + FixedArray::kHeaderSize; | 179 Address from_address = from->address() + FixedArray::kHeaderSize; |
| 180 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, | 180 CopyWords(reinterpret_cast<Object**>(to_address) + to_start, |
| 181 reinterpret_cast<Object**>(from_address) + from_start, | 181 reinterpret_cast<Object**>(from_address) + from_start, |
| (...skipping 10 matching lines...) |
| 192 } | 192 } |
| 193 } | 193 } |
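A note on the hunk above: the old code merely ASSERTed, in debug builds only, that the tail of the destination array was already hole-marked, while the new code unconditionally fills the tail slots `[to_start + copy_size, to->length())` with the hole sentinel. A minimal sketch of the MemsetPointer semantics this relies on (illustrative only; V8's actual implementation may use platform-optimized fills):

```cpp
// Illustrative only: the observable behavior of MemsetPointer as used
// above -- fill a contiguous range of pointer slots with one sentinel
// value, here standing in for heap->the_hole_value().
template <typename T>
static void MemsetPointer(T** dest, T* value, int counter) {
  for (int i = 0; i < counter; ++i) {
    dest[i] = value;  // every slot ends up pointing at the same sentinel
  }
}
```

This matches the call shape in the new code: `to->data_start()` supplies the slot array, the hole value is the sentinel, and `length` is the slot count.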
| 194 | 194 |
| 195 | 195 |
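The copy routines above and below both gain an AssertNoAllocation guard at entry: once raw slots are rewritten with a bare fill, no GC may run and move objects mid-copy. A hedged sketch of the general shape of such an RAII scope guard (the state and member names are assumptions for illustration, not V8's actual internals):

```cpp
// Illustrative RAII guard: while an instance is live, heap allocation
// is treated as forbidden, so raw pointer writes like the copies above
// cannot be invalidated by a GC triggered inside the scope.
class AssertNoAllocation {
 public:
  AssertNoAllocation() : was_allowed_(allocation_allowed_) {
    allocation_allowed_ = false;
  }
  ~AssertNoAllocation() { allocation_allowed_ = was_allowed_; }

 private:
  static bool allocation_allowed_;  // stands in for per-heap debug state
  bool was_allowed_;
};
bool AssertNoAllocation::allocation_allowed_ = true;
```

The guard makes the function's no-GC assumption explicit and checkable instead of implicit.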
| 196 static void CopyDictionaryToObjectElements(SeededNumberDictionary* from, | 196 static void CopyDictionaryToObjectElements(SeededNumberDictionary* from, |
| 197 uint32_t from_start, | 197 uint32_t from_start, |
| 198 FixedArray* to, | 198 FixedArray* to, |
| 199 ElementsKind to_kind, | 199 ElementsKind to_kind, |
| 200 uint32_t to_start, | 200 uint32_t to_start, |
| 201 int raw_copy_size) { | 201 int raw_copy_size) { |
| 202 AssertNoAllocation no_allocation; |
| 202 int copy_size = raw_copy_size; | 203 int copy_size = raw_copy_size; |
| 203 Heap* heap = from->GetHeap(); | 204 Heap* heap = from->GetHeap(); |
| 204 if (raw_copy_size < 0) { | 205 if (raw_copy_size < 0) { |
| 205 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 206 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 206 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 207 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 207 copy_size = from->max_number_key() + 1 - from_start; | 208 copy_size = from->max_number_key() + 1 - from_start; |
| 208 #ifdef DEBUG | |
| 209 // Fast object arrays cannot be uninitialized. Ensure they are already | |
| 210 // marked with the hole. | |
| 211 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 209 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 212 for (int i = to_start + copy_size; i < to->length(); ++i) { | 210 int start = to_start + copy_size; |
| 213 ASSERT(to->get(i)->IsTheHole()); | 211 int length = to->length() - start; |
| 212 if (length > 0) { |
| 213 MemsetPointer(to->data_start() + start, |
| 214 heap->the_hole_value(), length); |
| 214 } | 215 } |
| 215 } | 216 } |
| 216 #endif | |
| 217 } | 217 } |
| 218 ASSERT(to != from); | 218 ASSERT(to != from); |
| 219 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 219 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
| 220 if (copy_size == 0) return; | 220 if (copy_size == 0) return; |
| 221 uint32_t to_length = to->length(); | 221 uint32_t to_length = to->length(); |
| 222 if (to_start + copy_size > to_length) { | 222 if (to_start + copy_size > to_length) { |
| 223 copy_size = to_length - to_start; | 223 copy_size = to_length - to_start; |
| 224 } | 224 } |
| 225 for (int i = 0; i < copy_size; i++) { | 225 for (int i = 0; i < copy_size; i++) { |
| 226 int entry = from->FindEntry(i + from_start); | 226 int entry = from->FindEntry(i + from_start); |
| (...skipping 23 matching lines...) Expand all Loading... |
| 250 ElementsKind to_kind, | 250 ElementsKind to_kind, |
| 251 uint32_t to_start, | 251 uint32_t to_start, |
| 252 int raw_copy_size) { | 252 int raw_copy_size) { |
| 253 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); | 253 ASSERT(IsFastSmiOrObjectElementsKind(to_kind)); |
| 254 int copy_size = raw_copy_size; | 254 int copy_size = raw_copy_size; |
| 255 if (raw_copy_size < 0) { | 255 if (raw_copy_size < 0) { |
| 256 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 256 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
| 257 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 257 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
| 258 copy_size = Min(from->length() - from_start, | 258 copy_size = Min(from->length() - from_start, |
| 259 to->length() - to_start); | 259 to->length() - to_start); |
| 260 #ifdef DEBUG | |
| 261 // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already | |
| 262 // marked with the hole. | |
| 263 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 260 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
| 264 for (int i = to_start + copy_size; i < to->length(); ++i) { | 261 // Also initialize the area that will be copied over since HeapNumber |
| 265 ASSERT(to->get(i)->IsTheHole()); | 262 // allocation below can cause an incremental marking step, requiring all |
| 263 // existing heap objects to be properly initialized. |
| 264 int start = to_start; |
| 265 int length = to->length() - start; |
| 266 if (length > 0) { |
| 267 Heap* heap = from->GetHeap(); |
| 268 MemsetPointer(to->data_start() + start, heap->the_hole_value(), length); |
| 266 } | 269 } |
| 267 } | 270 } |
| 268 #endif | |
| 269 } | 271 } |
| 270 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | 272 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && |
| 271 (copy_size + static_cast<int>(from_start)) <= from->length()); | 273 (copy_size + static_cast<int>(from_start)) <= from->length()); |
| 272 if (copy_size == 0) return from; | 274 if (copy_size == 0) return from; |
| 273 for (int i = 0; i < copy_size; ++i) { | 275 for (int i = 0; i < copy_size; ++i) { |
| 274 if (IsFastSmiElementsKind(to_kind)) { | 276 if (IsFastSmiElementsKind(to_kind)) { |
| 275 UNIMPLEMENTED(); | 277 UNIMPLEMENTED(); |
| 276 return Failure::Exception(); | 278 return Failure::Exception(); |
| 277 } else { | 279 } else { |
| 278 MaybeObject* maybe_value = from->get(i + from_start); | 280 MaybeObject* maybe_value = from->get(i + from_start); |
| (...skipping 1576 matching lines...) |
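The double-to-object hunk above goes further than the other two: it initializes even the region that is about to be overwritten, because, as the new comment explains, allocating HeapNumbers during the copy can trigger an incremental marking step that walks the destination array. A toy, self-contained illustration (not V8 code) of why a marking walk must never see an uninitialized slot:

```cpp
#include <vector>

// Toy illustration: an incremental marker visits every slot of an
// object and follows the pointer it finds there. A slot still holding
// uninitialized bits would send the marker chasing a garbage pointer;
// pre-filling with a valid sentinel ("the hole") keeps the walk safe
// even before the real values are copied in.
struct Obj {
  bool marked = false;
  std::vector<Obj*> slots;  // every entry must point at a valid Obj
};

void Mark(Obj* o) {
  if (o == nullptr || o->marked) return;
  o->marked = true;
  for (Obj* slot : o->slots) Mark(slot);  // unsafe on a garbage pointer
}

int main() {
  Obj hole;                      // stands in for heap->the_hole_value()
  Obj array;
  array.slots.assign(8, &hole);  // MemsetPointer analogue: fill with holes
  Mark(&array);                  // safe: no slot is uninitialized
}
```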
| 1855 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; | 1857 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; |
| 1856 new_backing_store->set(0, length); | 1858 new_backing_store->set(0, length); |
| 1857 { MaybeObject* result = array->SetContent(new_backing_store); | 1859 { MaybeObject* result = array->SetContent(new_backing_store); |
| 1858 if (result->IsFailure()) return result; | 1860 if (result->IsFailure()) return result; |
| 1859 } | 1861 } |
| 1860 return array; | 1862 return array; |
| 1861 } | 1863 } |
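The final hunk shows the MaybeObject error-propagation idiom used throughout the file: an allocation returns a value that may instead encode a failure, and callers unwrap it with To(), returning the failure object up the stack rather than throwing. A standalone analogue (illustrative types, not V8's API):

```cpp
#include <cstdio>

// Standalone analogue of the MaybeObject idiom, mirroring the pattern
// `if (!maybe_obj->To(&new_backing_store)) return maybe_obj;` above.
struct MaybeInt {
  bool failed;
  int value;
  bool To(int* out) const {  // mirrors MaybeObject::To()
    if (failed) return false;
    *out = value;
    return true;
  }
};

MaybeInt Allocate(bool ok) { return MaybeInt{!ok, 42}; }

MaybeInt Caller() {
  MaybeInt maybe = Allocate(true);
  int v;
  if (!maybe.To(&v)) return maybe;  // bail out, handing failure upward
  std::printf("unwrapped %d\n", v);
  return maybe;
}

int main() { Caller(); }
```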
| 1862 | 1864 |
| 1863 | 1865 |
| 1864 } } // namespace v8::internal | 1866 } } // namespace v8::internal |