Chromium Code Reviews

Side by Side Diff: src/arm/stub-cache-arm.cc

Issue 14142005: Implement Polymorphic Store ICs (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed comments (created 7 years, 5 months ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 419 matching lines...)
430 Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name); 430 Handle<Cell> cell = GlobalObject::EnsurePropertyCell(global, name);
431 ASSERT(cell->value()->IsTheHole()); 431 ASSERT(cell->value()->IsTheHole());
432 __ mov(scratch, Operand(cell)); 432 __ mov(scratch, Operand(cell));
433 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); 433 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
434 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 434 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
435 __ cmp(scratch, ip); 435 __ cmp(scratch, ip);
436 __ b(ne, miss); 436 __ b(ne, miss);
437 } 437 }
438 438
439 439
440 void BaseStoreStubCompiler::GenerateNegativeHolderLookup(
441 MacroAssembler* masm,
442 Handle<JSObject> holder,
443 Register holder_reg,
444 Handle<Name> name,
445 Label* miss) {
446 if (holder->IsJSGlobalObject()) {
447 GenerateCheckPropertyCell(
448 masm,
ulan 2013/07/09 08:08:39 Nit: arguments fit in one or two lines.
Toon Verwaest 2013/07/09 08:22:26 Done.
449 Handle<GlobalObject>::cast(holder),
450 name,
451 scratch1(),
452 miss);
453 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
454 GenerateDictionaryNegativeLookup(
455 masm, miss, holder_reg, name, scratch1(), scratch2());
456 }
457 }
458
459
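For reference, a minimal sketch of the condensed call that ulan's nit above asks for, assuming the same arguments as in this patch set (the exact formatting in the follow-up patch set may differ):

    GenerateCheckPropertyCell(
        masm, Handle<GlobalObject>::cast(holder), name, scratch1(), miss);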
440 // Generate StoreTransition code, value is passed in r0 register. 460 // Generate StoreTransition code, value is passed in r0 register.
441 // When leaving generated code after success, the receiver_reg and name_reg 461 // When leaving generated code after success, the receiver_reg and name_reg
442 // may be clobbered. Upon branch to miss_label, the receiver and name 462 // may be clobbered. Upon branch to miss_label, the receiver and name
443 // registers have their original values. 463 // registers have their original values.
444 void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, 464 void BaseStoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
445 Handle<JSObject> object, 465 Handle<JSObject> object,
446 LookupResult* lookup, 466 LookupResult* lookup,
447 Handle<Map> transition, 467 Handle<Map> transition,
448 Handle<Name> name, 468 Handle<Name> name,
449 Register receiver_reg, 469 Register receiver_reg,
450 Register name_reg, 470 Register storage_reg,
451 Register value_reg, 471 Register value_reg,
452 Register scratch1, 472 Register scratch1,
453 Register scratch2, 473 Register scratch2,
454 Register scratch3, 474 Register scratch3,
455 Label* miss_label, 475 Label* miss_label,
ulan 2013/07/09 08:08:39 Can we use "miss" instead of "miss_label"?
Toon Verwaest 2013/07/09 08:22:26 I'll leave this as cleanup for later, given that it'
456 Label* miss_restore_name, 476 Label* slow) {
457 Label* slow) {
458 // r0 : value 477 // r0 : value
459 Label exit; 478 Label exit;
460 479
461 // Check that the map of the object hasn't changed.
462 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
463 DO_SMI_CHECK);
464
465 // Perform global security token check if needed.
466 if (object->IsJSGlobalProxy()) {
467 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
468 }
469
470 int descriptor = transition->LastAdded(); 480 int descriptor = transition->LastAdded();
471 DescriptorArray* descriptors = transition->instance_descriptors(); 481 DescriptorArray* descriptors = transition->instance_descriptors();
472 PropertyDetails details = descriptors->GetDetails(descriptor); 482 PropertyDetails details = descriptors->GetDetails(descriptor);
473 Representation representation = details.representation(); 483 Representation representation = details.representation();
474 ASSERT(!representation.IsNone()); 484 ASSERT(!representation.IsNone());
475 485
476 // Ensure no transitions to deprecated maps are followed.
477 __ CheckMapDeprecated(transition, scratch1, miss_label);
478
479 // Check that we are allowed to write this.
480 if (object->GetPrototype()->IsJSObject()) {
481 JSObject* holder;
482 // holder == object indicates that no property was found.
483 if (lookup->holder() != *object) {
484 holder = lookup->holder();
485 } else {
486 // Find the top object.
487 holder = *object;
488 do {
489 holder = JSObject::cast(holder->GetPrototype());
490 } while (holder->GetPrototype()->IsJSObject());
491 }
492 Register holder_reg = CheckPrototypes(
493 object, receiver_reg, Handle<JSObject>(holder), name_reg,
494 scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER);
495 // If no property was found, and the holder (the last object in the
496 // prototype chain) is in slow mode, we need to do a negative lookup on the
497 // holder.
498 if (lookup->holder() == *object) {
499 if (holder->IsJSGlobalObject()) {
500 GenerateCheckPropertyCell(
501 masm,
502 Handle<GlobalObject>(GlobalObject::cast(holder)),
503 name,
504 scratch1,
505 miss_restore_name);
506 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
507 GenerateDictionaryNegativeLookup(
508 masm, miss_restore_name, holder_reg, name, scratch1, scratch2);
509 }
510 }
511 }
512
513 Register storage_reg = name_reg;
514
515 if (details.type() == CONSTANT_FUNCTION) { 486 if (details.type() == CONSTANT_FUNCTION) {
516 Handle<HeapObject> constant( 487 Handle<HeapObject> constant(
517 HeapObject::cast(descriptors->GetValue(descriptor))); 488 HeapObject::cast(descriptors->GetValue(descriptor)));
518 __ LoadHeapObject(scratch1, constant); 489 __ LoadHeapObject(scratch1, constant);
519 __ cmp(value_reg, scratch1); 490 __ cmp(value_reg, scratch1);
520 __ b(ne, miss_restore_name); 491 __ b(ne, miss_label);
521 } else if (FLAG_track_fields && representation.IsSmi()) { 492 } else if (FLAG_track_fields && representation.IsSmi()) {
522 __ JumpIfNotSmi(value_reg, miss_restore_name); 493 __ JumpIfNotSmi(value_reg, miss_label);
523 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 494 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
524 __ JumpIfSmi(value_reg, miss_restore_name); 495 __ JumpIfSmi(value_reg, miss_label);
525 } else if (FLAG_track_double_fields && representation.IsDouble()) { 496 } else if (FLAG_track_double_fields && representation.IsDouble()) {
526 Label do_store, heap_number; 497 Label do_store, heap_number;
527 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); 498 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
528 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); 499 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
529 500
530 __ JumpIfNotSmi(value_reg, &heap_number); 501 __ JumpIfNotSmi(value_reg, &heap_number);
531 __ SmiUntag(scratch1, value_reg); 502 __ SmiUntag(scratch1, value_reg);
532 __ vmov(s0, scratch1); 503 __ vmov(s0, scratch1);
533 __ vcvt_f64_s32(d0, s0); 504 __ vcvt_f64_s32(d0, s0);
534 __ jmp(&do_store); 505 __ jmp(&do_store);
535 506
536 __ bind(&heap_number); 507 __ bind(&heap_number);
537 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, 508 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
538 miss_restore_name, DONT_DO_SMI_CHECK); 509 miss_label, DONT_DO_SMI_CHECK);
539 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 510 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
540 511
541 __ bind(&do_store); 512 __ bind(&do_store);
542 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); 513 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
543 } 514 }
544 515
545 // Stub never generated for non-global objects that require access 516 // Stub never generated for non-global objects that require access
546 // checks. 517 // checks.
547 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 518 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
548 519
(...skipping 10 matching lines...)
559 masm->isolate()), 530 masm->isolate()),
560 3, 531 3,
561 1); 532 1);
562 return; 533 return;
563 } 534 }
564 535
565 // Update the map of the object. 536 // Update the map of the object.
566 __ mov(scratch1, Operand(transition)); 537 __ mov(scratch1, Operand(transition));
567 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); 538 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
568 539
569 // Update the write barrier for the map field and pass the now unused 540 // Update the write barrier for the map field.
570 // name_reg as scratch register.
571 __ RecordWriteField(receiver_reg, 541 __ RecordWriteField(receiver_reg,
572 HeapObject::kMapOffset, 542 HeapObject::kMapOffset,
573 scratch1, 543 scratch1,
574 scratch2, 544 scratch2,
575 kLRHasNotBeenSaved, 545 kLRHasNotBeenSaved,
576 kDontSaveFPRegs, 546 kDontSaveFPRegs,
577 OMIT_REMEMBERED_SET, 547 OMIT_REMEMBERED_SET,
578 OMIT_SMI_CHECK); 548 OMIT_SMI_CHECK);
579 549
580 if (details.type() == CONSTANT_FUNCTION) { 550 if (details.type() == CONSTANT_FUNCTION) {
(...skipping 20 matching lines...)
601 __ str(storage_reg, FieldMemOperand(receiver_reg, offset)); 571 __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
602 } else { 572 } else {
603 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); 573 __ str(value_reg, FieldMemOperand(receiver_reg, offset));
604 } 574 }
605 575
606 if (!FLAG_track_fields || !representation.IsSmi()) { 576 if (!FLAG_track_fields || !representation.IsSmi()) {
607 // Skip updating write barrier if storing a smi. 577 // Skip updating write barrier if storing a smi.
608 __ JumpIfSmi(value_reg, &exit); 578 __ JumpIfSmi(value_reg, &exit);
609 579
610 // Update the write barrier for the array address. 580 // Update the write barrier for the array address.
611 // Pass the now unused name_reg as a scratch register.
612 if (!FLAG_track_double_fields || !representation.IsDouble()) { 581 if (!FLAG_track_double_fields || !representation.IsDouble()) {
613 __ mov(name_reg, value_reg); 582 __ mov(storage_reg, value_reg);
614 } else {
615 ASSERT(storage_reg.is(name_reg));
616 } 583 }
617 __ RecordWriteField(receiver_reg, 584 __ RecordWriteField(receiver_reg,
618 offset, 585 offset,
619 name_reg, 586 storage_reg,
620 scratch1, 587 scratch1,
621 kLRHasNotBeenSaved, 588 kLRHasNotBeenSaved,
622 kDontSaveFPRegs, 589 kDontSaveFPRegs,
623 EMIT_REMEMBERED_SET, 590 EMIT_REMEMBERED_SET,
624 smi_check); 591 smi_check);
625 } 592 }
626 } else { 593 } else {
627 // Write to the properties array. 594 // Write to the properties array.
628 int offset = index * kPointerSize + FixedArray::kHeaderSize; 595 int offset = index * kPointerSize + FixedArray::kHeaderSize;
629 // Get the properties array 596 // Get the properties array
630 __ ldr(scratch1, 597 __ ldr(scratch1,
631 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); 598 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
632 if (FLAG_track_double_fields && representation.IsDouble()) { 599 if (FLAG_track_double_fields && representation.IsDouble()) {
633 __ str(storage_reg, FieldMemOperand(scratch1, offset)); 600 __ str(storage_reg, FieldMemOperand(scratch1, offset));
634 } else { 601 } else {
635 __ str(value_reg, FieldMemOperand(scratch1, offset)); 602 __ str(value_reg, FieldMemOperand(scratch1, offset));
636 } 603 }
637 604
638 if (!FLAG_track_fields || !representation.IsSmi()) { 605 if (!FLAG_track_fields || !representation.IsSmi()) {
639 // Skip updating write barrier if storing a smi. 606 // Skip updating write barrier if storing a smi.
640 __ JumpIfSmi(value_reg, &exit); 607 __ JumpIfSmi(value_reg, &exit);
641 608
642 // Update the write barrier for the array address. 609 // Update the write barrier for the array address.
643 // Ok to clobber receiver_reg and name_reg, since we return.
644 if (!FLAG_track_double_fields || !representation.IsDouble()) { 610 if (!FLAG_track_double_fields || !representation.IsDouble()) {
645 __ mov(name_reg, value_reg); 611 __ mov(storage_reg, value_reg);
646 } else {
647 ASSERT(storage_reg.is(name_reg));
648 } 612 }
649 __ RecordWriteField(scratch1, 613 __ RecordWriteField(scratch1,
650 offset, 614 offset,
651 name_reg, 615 storage_reg,
652 receiver_reg, 616 receiver_reg,
653 kLRHasNotBeenSaved, 617 kLRHasNotBeenSaved,
654 kDontSaveFPRegs, 618 kDontSaveFPRegs,
655 EMIT_REMEMBERED_SET, 619 EMIT_REMEMBERED_SET,
656 smi_check); 620 smi_check);
657 } 621 }
658 } 622 }
659 623
660 // Return the value (register r0). 624 // Return the value (register r0).
661 ASSERT(value_reg.is(r0)); 625 ASSERT(value_reg.is(r0));
662 __ bind(&exit); 626 __ bind(&exit);
663 __ Ret(); 627 __ Ret();
664 } 628 }
665 629
666 630
667 // Generate StoreField code, value is passed in r0 register. 631 // Generate StoreField code, value is passed in r0 register.
668 // When leaving generated code after success, the receiver_reg and name_reg 632 // When leaving generated code after success, the receiver_reg and name_reg
669 // may be clobbered. Upon branch to miss_label, the receiver and name 633 // may be clobbered. Upon branch to miss_label, the receiver and name
670 // registers have their original values. 634 // registers have their original values.
671 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 635 void BaseStoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
672 Handle<JSObject> object, 636 Handle<JSObject> object,
673 LookupResult* lookup, 637 LookupResult* lookup,
674 Register receiver_reg, 638 Register receiver_reg,
675 Register name_reg, 639 Register name_reg,
676 Register value_reg, 640 Register value_reg,
677 Register scratch1, 641 Register scratch1,
678 Register scratch2, 642 Register scratch2,
679 Label* miss_label) { 643 Label* miss_label) {
680 // r0 : value 644 // r0 : value
681 Label exit; 645 Label exit;
682 646
683 // Check that the map of the object hasn't changed.
684 __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
685 DO_SMI_CHECK);
686
687 // Perform global security token check if needed.
688 if (object->IsJSGlobalProxy()) {
689 __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
690 }
691
692 // Stub never generated for non-global objects that require access 647 // Stub never generated for non-global objects that require access
693 // checks. 648 // checks.
694 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 649 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
695 650
696 int index = lookup->GetFieldIndex().field_index(); 651 int index = lookup->GetFieldIndex().field_index();
697 652
698 // Adjust for the number of properties stored in the object. Even in the 653 // Adjust for the number of properties stored in the object. Even in the
699 // face of a transition we can use the old map here because the size of the 654 // face of a transition we can use the old map here because the size of the
700 // object and the number of in-object properties is not going to change. 655 // object and the number of in-object properties is not going to change.
701 index -= object->map()->inobject_properties(); 656 index -= object->map()->inobject_properties();
(...skipping 633 matching lines...)
1335 // If we've skipped any global objects, it's not enough to verify that 1290 // If we've skipped any global objects, it's not enough to verify that
1336 // their maps haven't changed. We also need to check that the property 1291 // their maps haven't changed. We also need to check that the property
1337 // cell for the property is still empty. 1292 // cell for the property is still empty.
1338 GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss); 1293 GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);
1339 1294
1340 // Return the register containing the holder. 1295 // Return the register containing the holder.
1341 return reg; 1296 return reg;
1342 } 1297 }
1343 1298
1344 1299
1345 void BaseLoadStubCompiler::HandlerFrontendFooter(Label* success, 1300 void BaseLoadStubCompiler::HandlerFrontendFooter(Handle<Name> name,
1301 Label* success,
1346 Label* miss) { 1302 Label* miss) {
1347 if (!miss->is_unused()) { 1303 if (!miss->is_unused()) {
1348 __ b(success); 1304 __ b(success);
1349 __ bind(miss); 1305 __ bind(miss);
1350 TailCallBuiltin(masm(), MissBuiltin(kind())); 1306 TailCallBuiltin(masm(), MissBuiltin(kind()));
1351 } 1307 }
1352 } 1308 }
1353 1309
1354 1310
1311 void BaseStoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
1312 Label* success,
1313 Label* miss) {
1314 if (!miss->is_unused()) {
1315 __ b(success);
1316 __ bind(miss);
ulan 2013/07/09 08:08:39 Using GenerateRestoreName instead of these two lines
Toon Verwaest 2013/07/09 08:22:26 Done.
1317 __ mov(this->name(), Operand(name));
1318 TailCallBuiltin(masm(), MissBuiltin(kind()));
1319 }
1320 }
1321
1322
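As context for ulan's suggestion above, here is a hedged sketch of HandlerFrontendFooter rewritten to call GenerateRestoreName in place of the explicit bind and name reload; the helper's signature is assumed for illustration and is not shown in this diff:

    void BaseStoreStubCompiler::HandlerFrontendFooter(Handle<Name> name,
                                                      Label* success,
                                                      Label* miss) {
      if (!miss->is_unused()) {
        __ b(success);
        // Assumed helper: binds |miss| and reloads |name| into this->name().
        GenerateRestoreName(masm(), miss, name);
        TailCallBuiltin(masm(), MissBuiltin(kind()));
      }
    }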
1355 Register BaseLoadStubCompiler::CallbackHandlerFrontend( 1323 Register BaseLoadStubCompiler::CallbackHandlerFrontend(
1356 Handle<JSObject> object, 1324 Handle<JSObject> object,
1357 Register object_reg, 1325 Register object_reg,
1358 Handle<JSObject> holder, 1326 Handle<JSObject> holder,
1359 Handle<Name> name, 1327 Handle<Name> name,
1360 Label* success, 1328 Label* success,
1361 Handle<ExecutableAccessorInfo> callback) { 1329 Handle<ExecutableAccessorInfo> callback) {
1362 Label miss; 1330 Label miss;
1363 1331
1364 Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss); 1332 Register reg = HandlerFrontendHeader(object, object_reg, holder, name, &miss);
(...skipping 22 matching lines...)
1387 // pointer into the dictionary. Check that the value is the callback. 1355 // pointer into the dictionary. Check that the value is the callback.
1388 Register pointer = scratch3(); 1356 Register pointer = scratch3();
1389 const int kElementsStartOffset = NameDictionary::kHeaderSize + 1357 const int kElementsStartOffset = NameDictionary::kHeaderSize +
1390 NameDictionary::kElementsStartIndex * kPointerSize; 1358 NameDictionary::kElementsStartIndex * kPointerSize;
1391 const int kValueOffset = kElementsStartOffset + kPointerSize; 1359 const int kValueOffset = kElementsStartOffset + kPointerSize;
1392 __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset)); 1360 __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
1393 __ cmp(scratch2(), Operand(callback)); 1361 __ cmp(scratch2(), Operand(callback));
1394 __ b(ne, &miss); 1362 __ b(ne, &miss);
1395 } 1363 }
1396 1364
1397 HandlerFrontendFooter(success, &miss); 1365 HandlerFrontendFooter(name, success, &miss);
1398 return reg; 1366 return reg;
1399 } 1367 }
1400 1368
1401 1369
1402 void BaseLoadStubCompiler::NonexistentHandlerFrontend( 1370 void BaseLoadStubCompiler::NonexistentHandlerFrontend(
1403 Handle<JSObject> object, 1371 Handle<JSObject> object,
1404 Handle<JSObject> last, 1372 Handle<JSObject> last,
1405 Handle<Name> name, 1373 Handle<Name> name,
1406 Label* success, 1374 Label* success,
1407 Handle<GlobalObject> global) { 1375 Handle<GlobalObject> global) {
1408 Label miss; 1376 Label miss;
1409 1377
1410 HandlerFrontendHeader(object, receiver(), last, name, &miss); 1378 HandlerFrontendHeader(object, receiver(), last, name, &miss);
1411 1379
1412 // If the last object in the prototype chain is a global object, 1380 // If the last object in the prototype chain is a global object,
1413 // check that the global property cell is empty. 1381 // check that the global property cell is empty.
1414 if (!global.is_null()) { 1382 if (!global.is_null()) {
1415 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss); 1383 GenerateCheckPropertyCell(masm(), global, name, scratch2(), &miss);
1416 } 1384 }
1417 1385
1418 HandlerFrontendFooter(success, &miss); 1386 HandlerFrontendFooter(name, success, &miss);
1419 } 1387 }
1420 1388
1421 1389
1422 void BaseLoadStubCompiler::GenerateLoadField(Register reg, 1390 void BaseLoadStubCompiler::GenerateLoadField(Register reg,
1423 Handle<JSObject> holder, 1391 Handle<JSObject> holder,
1424 PropertyIndex field, 1392 PropertyIndex field,
1425 Representation representation) { 1393 Representation representation) {
1426 if (!reg.is(receiver())) __ mov(receiver(), reg); 1394 if (!reg.is(receiver())) __ mov(receiver(), reg);
1427 if (kind() == Code::LOAD_IC) { 1395 if (kind() == Code::LOAD_IC) {
1428 LoadFieldStub stub(field.is_inobject(holder), 1396 LoadFieldStub stub(field.is_inobject(holder),
(...skipping 1388 matching lines...)
2817 __ bind(&miss); 2785 __ bind(&miss);
2818 __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3); 2786 __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
2819 GenerateMissBranch(); 2787 GenerateMissBranch();
2820 2788
2821 // Return the generated code. 2789 // Return the generated code.
2822 return GetCode(Code::NORMAL, name); 2790 return GetCode(Code::NORMAL, name);
2823 } 2791 }
2824 2792
2825 2793
2826 Handle<Code> StoreStubCompiler::CompileStoreCallback( 2794 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2827 Handle<Name> name,
2828 Handle<JSObject> object, 2795 Handle<JSObject> object,
2829 Handle<JSObject> holder, 2796 Handle<JSObject> holder,
2797 Handle<Name> name,
2830 Handle<ExecutableAccessorInfo> callback) { 2798 Handle<ExecutableAccessorInfo> callback) {
2831 Label miss; 2799 Label success;
2832 // Check that the maps haven't changed. 2800 HandlerFrontend(object, receiver(), holder, name, &success);
2833 __ JumpIfSmi(receiver(), &miss); 2801 __ bind(&success);
2834 CheckPrototypes(object, receiver(), holder,
2835 scratch1(), scratch2(), scratch3(), name, &miss);
2836 2802
2837 // Stub never generated for non-global objects that require access checks. 2803 // Stub never generated for non-global objects that require access checks.
2838 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); 2804 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2839 2805
2840 __ push(receiver()); // receiver 2806 __ push(receiver()); // receiver
2841 __ mov(ip, Operand(callback)); // callback info 2807 __ mov(ip, Operand(callback)); // callback info
2842 __ Push(ip, this->name(), value()); 2808 __ push(ip);
2809 __ mov(ip, Operand(name));
2810 __ Push(ip, value());
2843 2811
2844 // Do tail-call to the runtime system. 2812 // Do tail-call to the runtime system.
2845 ExternalReference store_callback_property = 2813 ExternalReference store_callback_property =
2846 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); 2814 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2847 __ TailCallExternalReference(store_callback_property, 4, 1); 2815 __ TailCallExternalReference(store_callback_property, 4, 1);
2848 2816
2849 // Handle store cache miss.
2850 __ bind(&miss);
2851 TailCallBuiltin(masm(), MissBuiltin(kind()));
2852
2853 // Return the generated code. 2817 // Return the generated code.
2854 return GetICCode(kind(), Code::CALLBACKS, name); 2818 return GetCode(kind(), Code::CALLBACKS, name);
2855 } 2819 }
2856 2820
2857 2821
2858 #undef __ 2822 #undef __
2859 #define __ ACCESS_MASM(masm) 2823 #define __ ACCESS_MASM(masm)
2860 2824
2861 2825
2862 void StoreStubCompiler::GenerateStoreViaSetter( 2826 void StoreStubCompiler::GenerateStoreViaSetter(
2863 MacroAssembler* masm, 2827 MacroAssembler* masm,
2864 Handle<JSFunction> setter) { 2828 Handle<JSFunction> setter) {
(...skipping 233 matching lines...)
3098 __ mov(r3, Operand(cell)); 3062 __ mov(r3, Operand(cell));
3099 __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset)); 3063 __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));
3100 3064
3101 // Check for deleted property if property can actually be deleted. 3065 // Check for deleted property if property can actually be deleted.
3102 if (!is_dont_delete) { 3066 if (!is_dont_delete) {
3103 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 3067 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3104 __ cmp(r4, ip); 3068 __ cmp(r4, ip);
3105 __ b(eq, &miss); 3069 __ b(eq, &miss);
3106 } 3070 }
3107 3071
3108 HandlerFrontendFooter(&success, &miss); 3072 HandlerFrontendFooter(name, &success, &miss);
3109 __ bind(&success); 3073 __ bind(&success);
3110 3074
3111 Counters* counters = isolate()->counters(); 3075 Counters* counters = isolate()->counters();
3112 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3); 3076 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
3113 __ mov(r0, r4); 3077 __ mov(r0, r4);
3114 __ Ret(); 3078 __ Ret();
3115 3079
3116 // Return the generated code. 3080 // Return the generated code.
3117 return GetICCode(kind(), Code::NORMAL, name); 3081 return GetICCode(kind(), Code::NORMAL, name);
3118 } 3082 }
3119 3083
3120 3084
3121 Handle<Code> BaseLoadStubCompiler::CompilePolymorphicIC( 3085 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
3122 MapHandleList* receiver_maps, 3086 MapHandleList* receiver_maps,
3123 CodeHandleList* handlers, 3087 CodeHandleList* handlers,
3124 Handle<Name> name, 3088 Handle<Name> name,
3125 Code::StubType type, 3089 Code::StubType type,
3126 IcCheckType check) { 3090 IcCheckType check) {
3127 Label miss; 3091 Label miss;
3128 3092
3129 if (check == PROPERTY) { 3093 if (check == PROPERTY) {
3130 GenerateNameCheck(name, this->name(), &miss); 3094 GenerateNameCheck(name, this->name(), &miss);
3131 } 3095 }
(...skipping 603 matching lines...)
3735 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); 3699 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
3736 } 3700 }
3737 } 3701 }
3738 3702
3739 3703
3740 #undef __ 3704 #undef __
3741 3705
3742 } } // namespace v8::internal 3706 } } // namespace v8::internal
3743 3707
3744 #endif // V8_TARGET_ARCH_ARM 3708 #endif // V8_TARGET_ARCH_ARM