OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 47 matching lines...) | |
58 // - ExternalDoubleElementsAccessor | 58 // - ExternalDoubleElementsAccessor |
59 // - PixelElementsAccessor | 59 // - PixelElementsAccessor |
60 // - DictionaryElementsAccessor | 60 // - DictionaryElementsAccessor |
61 // - NonStrictArgumentsElementsAccessor | 61 // - NonStrictArgumentsElementsAccessor |
62 | 62 |
63 | 63 |
64 namespace v8 { | 64 namespace v8 { |
65 namespace internal { | 65 namespace internal { |
66 | 66 |
67 | 67 |
68 static const int kPackedSizeNotKnown = -1; | |
69 | |
70 | |
68 // First argument in list is the accessor class, the second argument is the | 71 // First argument in list is the accessor class, the second argument is the |
69 // accessor ElementsKind, and the third is the backing store class. Use the | 72 // accessor ElementsKind, and the third is the backing store class. Use the |
70 // fast element handler for smi-only arrays. The implementation is currently | 73 // fast element handler for smi-only arrays. The implementation is currently |
71 // identical. Note that the order must match that of the ElementsKind enum for | 74 // identical. Note that the order must match that of the ElementsKind enum for |
72 // the |accessor_array[]| below to work. | 75 // the |accessor_array[]| below to work. |
73 #define ELEMENTS_LIST(V) \ | 76 #define ELEMENTS_LIST(V) \ |
74 V(FastPackedSmiElementsAccessor, FAST_SMI_ELEMENTS, FixedArray) \ | 77 V(FastPackedSmiElementsAccessor, FAST_SMI_ELEMENTS, FixedArray) \ |
75 V(FastHoleySmiElementsAccessor, FAST_HOLEY_SMI_ELEMENTS, \ | 78 V(FastHoleySmiElementsAccessor, FAST_HOLEY_SMI_ELEMENTS, \ |
76 FixedArray) \ | 79 FixedArray) \ |
77 V(FastPackedObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \ | 80 V(FastPackedObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \ |
(...skipping 241 matching lines...) | |
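The comment above spells out the three columns of each ELEMENTS_LIST entry and why the entry order must mirror the ElementsKind enum: a table indexed by kind is built by expanding the list. A minimal sketch of that X-macro pattern follows (a hypothetical consumer, not the exact code in the skipped region):

  // Each list entry becomes one array slot; because the entries follow the
  // ElementsKind enum order, accessor_array_sketch[kind] yields the accessor
  // that matches that kind.
  #define ACCESSOR_ARRAY_ENTRY(Class, Kind, Store) new Class(#Class),
  static ElementsAccessor* accessor_array_sketch[] = {
    ELEMENTS_LIST(ACCESSOR_ARRAY_ENTRY)
  };
  #undef ACCESSOR_ARRAY_ENTRY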
319 Address from_address = from->address() + FixedDoubleArray::kHeaderSize; | 322 Address from_address = from->address() + FixedDoubleArray::kHeaderSize; |
320 to_address += kDoubleSize * to_start; | 323 to_address += kDoubleSize * to_start; |
321 from_address += kDoubleSize * from_start; | 324 from_address += kDoubleSize * from_start; |
322 int words_per_double = (kDoubleSize / kPointerSize); | 325 int words_per_double = (kDoubleSize / kPointerSize); |
323 CopyWords(reinterpret_cast<Object**>(to_address), | 326 CopyWords(reinterpret_cast<Object**>(to_address), |
324 reinterpret_cast<Object**>(from_address), | 327 reinterpret_cast<Object**>(from_address), |
325 words_per_double * copy_size); | 328 words_per_double * copy_size); |
326 } | 329 } |
327 | 330 |
328 | 331 |
332 static void CopySmiToDoubleElements(FixedArray* from, | |
333 uint32_t from_start, | |
334 FixedDoubleArray* to, | |
335 uint32_t to_start, | |
336 int raw_copy_size) { | |
337 int copy_size = raw_copy_size; | |
338 if (raw_copy_size < 0) { | |
339 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | |
340 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | |
341 copy_size = from->length() - from_start; | |
342 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | |
343 for (int i = to_start + copy_size; i < to->length(); ++i) { | |
344 to->set_the_hole(i); | |
345 } | |
346 } | |
347 } | |
348 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | |
349 (copy_size + static_cast<int>(from_start)) <= from->length()); | |
350 if (copy_size == 0) return; | |
351 Object* the_hole = from->GetHeap()->the_hole_value(); | |
352 for (uint32_t from_end = from_start + copy_size; | |
353 from_start < from_end; from_start++, to_start++) { | |
354 Object* hole_or_smi = from->get(from_start); | |
355 if (hole_or_smi == the_hole) { | |
356 to->set_the_hole(to_start); | |
357 } else { | |
358 to->set(to_start, Smi::cast(hole_or_smi)->value()); | |
359 } | |
360 } | |
361 } | |
362 | |
363 | |
364 static void CopyPackedSmiToDoubleElements(FixedArray* from, | |
365 uint32_t from_start, | |
366 FixedDoubleArray* to, | |
367 uint32_t to_start, | |
368 int packed_size, | |
369 int raw_copy_size) { | |
370 int copy_size = raw_copy_size; | |
371 uint32_t to_end; | |
372 if (raw_copy_size < 0) { | |
373 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | |
374 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | |
375 copy_size = from->length() - from_start; | |
376 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | |
377 to_end = to->length(); | |
378 } else { | |
379 to_end = to_start + copy_size; | |
Jakob Kummerow
2012/06/12 12:50:44
I have a suspicion that the Windows compiler might
danno
2012/06/12 15:38:14
Done.
| |
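The remark above is cut off, but it appears to concern |to_end| (an assumption on my part). If the worry is a "potentially uninitialized local variable" warning from the Windows compiler, a minimal sketch of the usual fix is to give the variable a value on every path without changing behavior:

  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {
    ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
           raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
    copy_size = from->length() - from_start;
  }
  // Assigned on every path, so the compiler can see it is initialized.
  uint32_t to_end =
      (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole)
          ? static_cast<uint32_t>(to->length())
          : to_start + copy_size;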
380 } | |
381 } else { | |
382 to_end = to_start + copy_size; | |
383 } | |
384 ASSERT(static_cast<int>(to_end) <= to->length()); | |
385 ASSERT(packed_size >= 0 && packed_size <= copy_size); | |
386 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | |
387 (copy_size + static_cast<int>(from_start)) <= from->length()); | |
388 if (copy_size == 0) return; | |
389 for (uint32_t from_end = from_start + packed_size; | |
390 from_start < from_end; from_start++, to_start++) { | |
391 Object* smi = from->get(from_start); | |
392 ASSERT(!smi->IsTheHole()); | |
393 to->set(to_start, Smi::cast(smi)->value()); | |
394 } | |
395 | |
396 while (to_start < to_end) { | |
397 to->set_the_hole(to_start++); | |
398 } | |
399 } | |
400 | |
401 | |
329 static void CopyObjectToDoubleElements(FixedArray* from, | 402 static void CopyObjectToDoubleElements(FixedArray* from, |
330 uint32_t from_start, | 403 uint32_t from_start, |
331 FixedDoubleArray* to, | 404 FixedDoubleArray* to, |
332 uint32_t to_start, | 405 uint32_t to_start, |
333 int raw_copy_size) { | 406 int raw_copy_size) { |
334 int copy_size = raw_copy_size; | 407 int copy_size = raw_copy_size; |
335 if (raw_copy_size < 0) { | 408 if (raw_copy_size < 0) { |
336 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || | 409 ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd || |
337 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); | 410 raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole); |
338 copy_size = from->length() - from_start; | 411 copy_size = from->length() - from_start; |
339 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { | 412 if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) { |
340 for (int i = to_start + copy_size; i < to->length(); ++i) { | 413 for (int i = to_start + copy_size; i < to->length(); ++i) { |
341 to->set_the_hole(i); | 414 to->set_the_hole(i); |
342 } | 415 } |
343 } | 416 } |
344 } | 417 } |
345 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && | 418 ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() && |
346 (copy_size + static_cast<int>(from_start)) <= from->length()); | 419 (copy_size + static_cast<int>(from_start)) <= from->length()); |
347 if (copy_size == 0) return; | 420 if (copy_size == 0) return; |
348 for (int i = 0; i < copy_size; i++) { | 421 Object* the_hole = from->GetHeap()->the_hole_value(); |
349 Object* hole_or_object = from->get(i + from_start); | 422 for (uint32_t from_end = from_start + copy_size; |
350 if (hole_or_object->IsTheHole()) { | 423 from_start < from_end; from_start++, to_start++) { |
351 to->set_the_hole(i + to_start); | 424 Object* hole_or_object = from->get(from_start); |
425 if (hole_or_object == the_hole) { | |
Jakob Kummerow
2012/06/12 12:50:44
Why this change? Is it faster?
danno
2012/06/12 15:38:14
Yes :-)
| |
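For readers following the exchange: the change replaces a per-element IsTheHole() call with a pointer comparison against the hole sentinel loaded once before the loop. A sketch of the new shape, with identifiers mirroring the surrounding code (the speed claim is the author's, from the reply above):

  // Old form called a predicate on every element:
  //   if (from->get(i + from_start)->IsTheHole()) { ... }
  // New form: the hole is a singleton, so hoist the load and compare pointers.
  Object* the_hole = from->GetHeap()->the_hole_value();  // loaded once
  for (uint32_t i = 0; i < static_cast<uint32_t>(copy_size); ++i) {
    Object* value = from->get(from_start + i);
    if (value == the_hole) {              // raw pointer compare, no call
      to->set_the_hole(to_start + i);
    } else {
      to->set(to_start + i, value->Number());
    }
  }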
426 to->set_the_hole(to_start); | |
352 } else { | 427 } else { |
353 to->set(i + to_start, hole_or_object->Number()); | 428 to->set(to_start, hole_or_object->Number()); |
354 } | 429 } |
355 } | 430 } |
356 } | 431 } |
357 | 432 |
358 | 433 |
359 static void CopyDictionaryToDoubleElements(SeededNumberDictionary* from, | 434 static void CopyDictionaryToDoubleElements(SeededNumberDictionary* from, |
360 uint32_t from_start, | 435 uint32_t from_start, |
361 FixedDoubleArray* to, | 436 FixedDoubleArray* to, |
362 uint32_t to_start, | 437 uint32_t to_start, |
363 int raw_copy_size) { | 438 int raw_copy_size) { |
(...skipping 156 matching lines...) | |
520 | 595 |
521 MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, | 596 MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj, |
522 uint32_t key, | 597 uint32_t key, |
523 JSReceiver::DeleteMode mode) = 0; | 598 JSReceiver::DeleteMode mode) = 0; |
524 | 599 |
525 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, | 600 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, |
526 uint32_t from_start, | 601 uint32_t from_start, |
527 FixedArrayBase* to, | 602 FixedArrayBase* to, |
528 ElementsKind to_kind, | 603 ElementsKind to_kind, |
529 uint32_t to_start, | 604 uint32_t to_start, |
605 int packed_size, | |
530 int copy_size) { | 606 int copy_size) { |
531 UNREACHABLE(); | 607 UNREACHABLE(); |
532 return NULL; | 608 return NULL; |
533 } | 609 } |
534 | 610 |
535 MUST_USE_RESULT virtual MaybeObject* CopyElements(JSObject* from_holder, | 611 MUST_USE_RESULT virtual MaybeObject* CopyElements(JSObject* from_holder, |
536 uint32_t from_start, | 612 uint32_t from_start, |
537 FixedArrayBase* to, | 613 FixedArrayBase* to, |
538 ElementsKind to_kind, | 614 ElementsKind to_kind, |
539 uint32_t to_start, | 615 uint32_t to_start, |
540 int copy_size, | 616 int copy_size, |
541 FixedArrayBase* from) { | 617 FixedArrayBase* from) { |
618 int packed_size = kPackedSizeNotKnown; | |
542 if (from == NULL) { | 619 if (from == NULL) { |
543 from = from_holder->elements(); | 620 from = from_holder->elements(); |
544 } | 621 } |
622 | |
623 if (from_holder) { | |
624 ElementsKind elements_kind = from_holder->GetElementsKind(); | |
625 bool is_packed = FLAG_packed_arrays && | |
Jakob Kummerow
2012/06/12 12:50:44
Another unnecessary FLAG check.
danno
2012/06/12 15:38:14
Done.
| |
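Since the displayed patch set predates the "Done", the NEW column above still shows the flag guard; presumably the follow-up reduces the condition to the kind check alone. A sketch of the adjusted condition (not the landed code):

  // FLAG_packed_arrays check dropped per the review comment.
  bool is_packed =
      IsFastPackedElementsKind(elements_kind) && from_holder->IsJSArray();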
626 IsFastPackedElementsKind(elements_kind) && from_holder->IsJSArray(); | |
627 if (is_packed) { | |
628 packed_size = Smi::cast(JSArray::cast(from_holder)->length())->value(); | |
629 if (copy_size >= 0 && packed_size > copy_size) { | |
630 packed_size = copy_size; | |
631 } | |
632 } | |
633 } | |
545 if (from->length() == 0) { | 634 if (from->length() == 0) { |
546 return from; | 635 return from; |
547 } | 636 } |
548 return ElementsAccessorSubclass::CopyElementsImpl( | 637 return ElementsAccessorSubclass::CopyElementsImpl( |
549 from, from_start, to, to_kind, to_start, copy_size); | 638 from, from_start, to, to_kind, to_start, packed_size, copy_size); |
550 } | 639 } |
551 | 640 |
552 MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( | 641 MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray( |
553 Object* receiver, | 642 Object* receiver, |
554 JSObject* holder, | 643 JSObject* holder, |
555 FixedArray* to, | 644 FixedArray* to, |
556 FixedArrayBase* from) { | 645 FixedArrayBase* from) { |
557 int len0 = to->length(); | 646 int len0 = to->length(); |
558 #ifdef DEBUG | 647 #ifdef DEBUG |
559 if (FLAG_enable_slow_asserts) { | 648 if (FLAG_enable_slow_asserts) { |
(...skipping 282 matching lines...) | |
842 explicit FastSmiOrObjectElementsAccessor(const char* name) | 931 explicit FastSmiOrObjectElementsAccessor(const char* name) |
843 : FastElementsAccessor<FastElementsAccessorSubclass, | 932 : FastElementsAccessor<FastElementsAccessorSubclass, |
844 KindTraits, | 933 KindTraits, |
845 kPointerSize>(name) {} | 934 kPointerSize>(name) {} |
846 | 935 |
847 static MaybeObject* CopyElementsImpl(FixedArrayBase* from, | 936 static MaybeObject* CopyElementsImpl(FixedArrayBase* from, |
848 uint32_t from_start, | 937 uint32_t from_start, |
849 FixedArrayBase* to, | 938 FixedArrayBase* to, |
850 ElementsKind to_kind, | 939 ElementsKind to_kind, |
851 uint32_t to_start, | 940 uint32_t to_start, |
941 int packed_size, | |
852 int copy_size) { | 942 int copy_size) { |
853 if (IsFastSmiOrObjectElementsKind(to_kind)) { | 943 if (IsFastSmiOrObjectElementsKind(to_kind)) { |
854 CopyObjectToObjectElements( | 944 CopyObjectToObjectElements( |
855 FixedArray::cast(from), KindTraits::Kind, from_start, | 945 FixedArray::cast(from), KindTraits::Kind, from_start, |
856 FixedArray::cast(to), to_kind, to_start, copy_size); | 946 FixedArray::cast(to), to_kind, to_start, copy_size); |
857 } else if (IsFastDoubleElementsKind(to_kind)) { | 947 } else if (IsFastDoubleElementsKind(to_kind)) { |
858 CopyObjectToDoubleElements( | 948 if (IsFastSmiElementsKind(KindTraits::Kind)) { |
859 FixedArray::cast(from), from_start, | 949 if (IsFastPackedElementsKind(KindTraits::Kind) && |
860 FixedDoubleArray::cast(to), to_start, copy_size); | 950 packed_size != kPackedSizeNotKnown) { |
951 CopyPackedSmiToDoubleElements( | |
952 FixedArray::cast(from), from_start, | |
953 FixedDoubleArray::cast(to), to_start, | |
954 packed_size, copy_size); | |
955 } else { | |
956 CopySmiToDoubleElements( | |
957 FixedArray::cast(from), from_start, | |
958 FixedDoubleArray::cast(to), to_start, copy_size); | |
959 } | |
960 } else { | |
961 CopyObjectToDoubleElements( | |
962 FixedArray::cast(from), from_start, | |
963 FixedDoubleArray::cast(to), to_start, copy_size); | |
964 } | |
861 } else { | 965 } else { |
862 UNREACHABLE(); | 966 UNREACHABLE(); |
863 } | 967 } |
864 return to->GetHeap()->undefined_value(); | 968 return to->GetHeap()->undefined_value(); |
865 } | 969 } |
866 | 970 |
867 | 971 |
868 static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj, | 972 static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj, |
869 uint32_t capacity, | 973 uint32_t capacity, |
870 uint32_t length) { | 974 uint32_t length) { |
(...skipping 74 matching lines...) | |
945 return obj->SetFastDoubleElementsCapacityAndLength(capacity, | 1049 return obj->SetFastDoubleElementsCapacityAndLength(capacity, |
946 length); | 1050 length); |
947 } | 1051 } |
948 | 1052 |
949 protected: | 1053 protected: |
950 static MaybeObject* CopyElementsImpl(FixedArrayBase* from, | 1054 static MaybeObject* CopyElementsImpl(FixedArrayBase* from, |
951 uint32_t from_start, | 1055 uint32_t from_start, |
952 FixedArrayBase* to, | 1056 FixedArrayBase* to, |
953 ElementsKind to_kind, | 1057 ElementsKind to_kind, |
954 uint32_t to_start, | 1058 uint32_t to_start, |
1059 int packed_size, | |
955 int copy_size) { | 1060 int copy_size) { |
956 switch (to_kind) { | 1061 switch (to_kind) { |
957 case FAST_SMI_ELEMENTS: | 1062 case FAST_SMI_ELEMENTS: |
958 case FAST_ELEMENTS: | 1063 case FAST_ELEMENTS: |
959 case FAST_HOLEY_SMI_ELEMENTS: | 1064 case FAST_HOLEY_SMI_ELEMENTS: |
960 case FAST_HOLEY_ELEMENTS: | 1065 case FAST_HOLEY_ELEMENTS: |
961 return CopyDoubleToObjectElements( | 1066 return CopyDoubleToObjectElements( |
962 FixedDoubleArray::cast(from), from_start, FixedArray::cast(to), | 1067 FixedDoubleArray::cast(from), from_start, FixedArray::cast(to), |
963 to_kind, to_start, copy_size); | 1068 to_kind, to_start, copy_size); |
964 case FAST_DOUBLE_ELEMENTS: | 1069 case FAST_DOUBLE_ELEMENTS: |
(...skipping 293 matching lines...) | |
1258 } | 1363 } |
1259 } | 1364 } |
1260 return heap->true_value(); | 1365 return heap->true_value(); |
1261 } | 1366 } |
1262 | 1367 |
1263 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, | 1368 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, |
1264 uint32_t from_start, | 1369 uint32_t from_start, |
1265 FixedArrayBase* to, | 1370 FixedArrayBase* to, |
1266 ElementsKind to_kind, | 1371 ElementsKind to_kind, |
1267 uint32_t to_start, | 1372 uint32_t to_start, |
1373 int packed_size, | |
1268 int copy_size) { | 1374 int copy_size) { |
1269 switch (to_kind) { | 1375 switch (to_kind) { |
1270 case FAST_SMI_ELEMENTS: | 1376 case FAST_SMI_ELEMENTS: |
1271 case FAST_ELEMENTS: | 1377 case FAST_ELEMENTS: |
1272 case FAST_HOLEY_SMI_ELEMENTS: | 1378 case FAST_HOLEY_SMI_ELEMENTS: |
1273 case FAST_HOLEY_ELEMENTS: | 1379 case FAST_HOLEY_ELEMENTS: |
1274 CopyDictionaryToObjectElements( | 1380 CopyDictionaryToObjectElements( |
1275 SeededNumberDictionary::cast(from), from_start, | 1381 SeededNumberDictionary::cast(from), from_start, |
1276 FixedArray::cast(to), to_kind, to_start, copy_size); | 1382 FixedArray::cast(to), to_kind, to_start, copy_size); |
1277 return from; | 1383 return from; |
(...skipping 132 matching lines...) | |
1410 } | 1516 } |
1411 } | 1517 } |
1412 return obj->GetHeap()->true_value(); | 1518 return obj->GetHeap()->true_value(); |
1413 } | 1519 } |
1414 | 1520 |
1415 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, | 1521 MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from, |
1416 uint32_t from_start, | 1522 uint32_t from_start, |
1417 FixedArrayBase* to, | 1523 FixedArrayBase* to, |
1418 ElementsKind to_kind, | 1524 ElementsKind to_kind, |
1419 uint32_t to_start, | 1525 uint32_t to_start, |
1526 int packed_size, | |
1420 int copy_size) { | 1527 int copy_size) { |
1421 FixedArray* parameter_map = FixedArray::cast(from); | 1528 FixedArray* parameter_map = FixedArray::cast(from); |
1422 FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); | 1529 FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); |
1423 ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments); | 1530 ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments); |
1424 return accessor->CopyElements(NULL, from_start, to, to_kind, | 1531 return accessor->CopyElements(NULL, from_start, to, to_kind, |
1425 to_start, copy_size, arguments); | 1532 to_start, copy_size, arguments); |
1426 } | 1533 } |
1427 | 1534 |
1428 static uint32_t GetCapacityImpl(FixedArray* parameter_map) { | 1535 static uint32_t GetCapacityImpl(FixedArray* parameter_map) { |
1429 FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1)); | 1536 FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1)); |
(...skipping 144 matching lines...) | |
1574 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; | 1681 if (!maybe_obj->To(&new_backing_store)) return maybe_obj; |
1575 new_backing_store->set(0, length); | 1682 new_backing_store->set(0, length); |
1576 { MaybeObject* result = array->SetContent(new_backing_store); | 1683 { MaybeObject* result = array->SetContent(new_backing_store); |
1577 if (result->IsFailure()) return result; | 1684 if (result->IsFailure()) return result; |
1578 } | 1685 } |
1579 return array; | 1686 return array; |
1580 } | 1687 } |
1581 | 1688 |
1582 | 1689 |
1583 } } // namespace v8::internal | 1690 } } // namespace v8::internal |