Chromium Code Reviews

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 9845019: Port count-based profiler to x64 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments. Created 8 years, 9 months ago.
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 16 matching lines...)
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_X64) 30 #if defined(V8_TARGET_ARCH_X64)
31 31
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "isolate-inl.h"
37 #include "parser.h" 38 #include "parser.h"
38 #include "scopes.h" 39 #include "scopes.h"
39 #include "stub-cache.h" 40 #include "stub-cache.h"
40 41
41 namespace v8 { 42 namespace v8 {
42 namespace internal { 43 namespace internal {
43 44
44 #define __ ACCESS_MASM(masm_) 45 #define __ ACCESS_MASM(masm_)
45 46
46 47
(...skipping 46 matching lines...)
93 } 94 }
94 95
95 MacroAssembler* masm_; 96 MacroAssembler* masm_;
96 Label patch_site_; 97 Label patch_site_;
97 #ifdef DEBUG 98 #ifdef DEBUG
98 bool info_emitted_; 99 bool info_emitted_;
99 #endif 100 #endif
100 }; 101 };
101 102
102 103
103 int FullCodeGenerator::self_optimization_header_size() {
104 return 20;
105 }
106
107
108 // Generate code for a JS function. On entry to the function the receiver 104 // Generate code for a JS function. On entry to the function the receiver
109 // and arguments have been pushed on the stack left to right, with the 105 // and arguments have been pushed on the stack left to right, with the
110 // return address on top of them. The actual argument count matches the 106 // return address on top of them. The actual argument count matches the
111 // formal parameter count expected by the function. 107 // formal parameter count expected by the function.
112 // 108 //
113 // The live registers are: 109 // The live registers are:
114 // o rdi: the JS function object being called (i.e. ourselves) 110 // o rdi: the JS function object being called (i.e. ourselves)
115 // o rsi: our context 111 // o rsi: our context
116 // o rbp: our caller's frame pointer 112 // o rbp: our caller's frame pointer
117 // o rsp: stack pointer (pointing to return address) 113 // o rsp: stack pointer (pointing to return address)
118 // 114 //
119 // The function builds a JS frame. Please see JavaScriptFrameConstants in 115 // The function builds a JS frame. Please see JavaScriptFrameConstants in
120 // frames-x64.h for its layout. 116 // frames-x64.h for its layout.
121 void FullCodeGenerator::Generate() { 117 void FullCodeGenerator::Generate() {
122 CompilationInfo* info = info_; 118 CompilationInfo* info = info_;
123 handler_table_ = 119 handler_table_ =
124 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 120 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
121 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
122 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
125 SetFunctionPosition(function()); 123 SetFunctionPosition(function());
126 Comment cmnt(masm_, "[ function compiled by full code generator"); 124 Comment cmnt(masm_, "[ function compiled by full code generator");
127 125
128 // We can optionally optimize based on counters rather than statistical
129 // sampling.
130 if (info->ShouldSelfOptimize()) {
131 if (FLAG_trace_opt_verbose) {
132 PrintF("[adding self-optimization header to %s]\n",
133 *info->function()->debug_name()->ToCString());
134 }
135 has_self_optimization_header_ = true;
136 MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
137 Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
138 JSGlobalPropertyCell* cell;
139 if (maybe_cell->To(&cell)) {
140 __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
141 RelocInfo::EMBEDDED_OBJECT);
142 __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
143 Smi::FromInt(-1));
144 Handle<Code> compile_stub(
145 isolate()->builtins()->builtin(Builtins::kLazyRecompile));
146 __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
147 ASSERT(masm_->pc_offset() == self_optimization_header_size());
148 }
149 }
150
151 #ifdef DEBUG 126 #ifdef DEBUG
152 if (strlen(FLAG_stop_at) > 0 && 127 if (strlen(FLAG_stop_at) > 0 &&
153 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 128 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
154 __ int3(); 129 __ int3();
155 } 130 }
156 #endif 131 #endif
157 132
158 // Strict mode functions and builtins need to replace the receiver 133 // Strict mode functions and builtins need to replace the receiver
159 // with undefined when called as functions (without an explicit 134 // with undefined when called as functions (without an explicit
160 // receiver object). rcx is zero for method calls and non-zero for 135 // receiver object). rcx is zero for method calls and non-zero for
(...skipping 154 matching lines...)
315 EmitReturnSequence(); 290 EmitReturnSequence();
316 } 291 }
317 } 292 }
318 293
319 294
320 void FullCodeGenerator::ClearAccumulator() { 295 void FullCodeGenerator::ClearAccumulator() {
321 __ Set(rax, 0); 296 __ Set(rax, 0);
322 } 297 }
323 298
324 299
300 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
301 __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
302 __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
303 Smi::FromInt(-delta));
304 }
305
306
307 void FullCodeGenerator::EmitProfilingCounterReset() {
308 int reset_value = FLAG_interrupt_budget;
309 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
310 // Self-optimization is a one-off thing; if it fails, don't try again.
311 reset_value = Smi::kMaxValue;
312 }
313 if (isolate()->IsDebuggerActive()) {
314 // Detect debug break requests as soon as possible.
315 reset_value = 10;
316 }
317 __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
318 __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
319 Smi::FromInt(reset_value));
320 }
321
322
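
The two helpers above carry the core of the counter mechanism: the profiling counter is a Smi kept in a JSGlobalPropertyCell (allocated in Generate() with FLAG_interrupt_budget as its initial value), back edges and returns subtract a weight from it, and once the interrupt has been serviced the budget is refilled. A minimal standalone model of that bookkeeping, using invented names rather than the V8 API:

// Conceptual sketch only; the struct and its names are illustrative,
// not V8 internals.
struct ProfilingCounterModel {
  int budget;  // plays the role of the Smi stored in the property cell

  explicit ProfilingCounterModel(int interrupt_budget)
      : budget(interrupt_budget) {}

  // Mirrors EmitProfilingCounterDecrement: subtract the weight of the
  // edge just taken and report whether the profiler interrupt should fire.
  bool Decrement(int weight) {
    budget -= weight;
    return budget <= 0;  // generated code tests the sign and calls InterruptStub
  }

  // Mirrors EmitProfilingCounterReset: refill the budget after the interrupt.
  void Reset(int reset_value) { budget = reset_value; }
};
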
323 static const int kMaxBackEdgeWeight = 127;
324 static const int kBackEdgeDistanceDivisor = 162;
325
326
325 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, 327 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
326 Label* back_edge_target) { 328 Label* back_edge_target) {
327 Comment cmnt(masm_, "[ Stack check"); 329 Comment cmnt(masm_, "[ Stack check");
328 Label ok; 330 Label ok;
329 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 331
330 __ j(above_equal, &ok, Label::kNear); 332 if (FLAG_count_based_interrupts) {
331 StackCheckStub stub; 333 int weight = 1;
332 __ CallStub(&stub); 334 if (FLAG_weighted_back_edges) {
335 ASSERT(back_edge_target->is_bound());
336 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
337 weight = Min(kMaxBackEdgeWeight,
338 Max(1, distance / kBackEdgeDistanceDivisor));
339 }
340 EmitProfilingCounterDecrement(weight);
341 __ j(positive, &ok, Label::kNear);
342 InterruptStub stub;
343 __ CallStub(&stub);
344 } else {
345 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
346 __ j(above_equal, &ok, Label::kNear);
347 StackCheckStub stub;
348 __ CallStub(&stub);
349 }
350
333 // Record a mapping of this PC offset to the OSR id. This is used to find 351 // Record a mapping of this PC offset to the OSR id. This is used to find
334 // the AST id from the unoptimized code in order to use it as a key into 352 // the AST id from the unoptimized code in order to use it as a key into
335 // the deoptimization input data found in the optimized code. 353 // the deoptimization input data found in the optimized code.
336 RecordStackCheck(stmt->OsrEntryId()); 354 RecordStackCheck(stmt->OsrEntryId());
337 355
338 // Loop stack checks can be patched to perform on-stack replacement. In 356 // Loop stack checks can be patched to perform on-stack replacement. In
339 // order to decide whether or not to perform OSR we embed the loop depth 357 // order to decide whether or not to perform OSR we embed the loop depth
340 // in a test instruction after the call so we can extract it from the OSR 358 // in a test instruction after the call so we can extract it from the OSR
341 // builtin. 359 // builtin.
342 ASSERT(loop_depth() > 0); 360 ASSERT(loop_depth() > 0);
343 __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker))); 361 __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
344 362
363 if (FLAG_count_based_interrupts) {
364 EmitProfilingCounterReset();
365 }
366
345 __ bind(&ok); 367 __ bind(&ok);
346 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 368 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
347 // Record a mapping of the OSR id to this PC. This is used if the OSR 369 // Record a mapping of the OSR id to this PC. This is used if the OSR
348 // entry becomes the target of a bailout. We don't expect it to be, but 370 // entry becomes the target of a bailout. We don't expect it to be, but
349 // we want it to work if it is. 371 // we want it to work if it is.
350 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 372 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
351 } 373 }
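
The weight formula used above scales the cost of a back edge with the amount of code it jumps over: weight = Min(kMaxBackEdgeWeight, Max(1, distance / kBackEdgeDistanceDivisor)), so large loop bodies drain the interrupt budget faster than tight ones. A small sketch of the same clamped division with a few worked examples (the distances are made up):

// Mirrors the weight computation in EmitStackCheck; sample distances are
// illustrative only.
int BackEdgeWeight(int distance) {
  const int kMaxBackEdgeWeight = 127;
  const int kBackEdgeDistanceDivisor = 162;
  int weight = distance / kBackEdgeDistanceDivisor;
  if (weight < 1) weight = 1;                                    // Max(1, ...)
  if (weight > kMaxBackEdgeWeight) weight = kMaxBackEdgeWeight;  // Min(127, ...)
  return weight;
}
// BackEdgeWeight(40)    == 1    (tiny loop body)
// BackEdgeWeight(500)   == 3    (500 / 162 == 3)
// BackEdgeWeight(50000) == 127  (clamped to kMaxBackEdgeWeight)
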
352 374
353 375
354 void FullCodeGenerator::EmitReturnSequence() { 376 void FullCodeGenerator::EmitReturnSequence() {
355 Comment cmnt(masm_, "[ Return sequence"); 377 Comment cmnt(masm_, "[ Return sequence");
356 if (return_label_.is_bound()) { 378 if (return_label_.is_bound()) {
357 __ jmp(&return_label_); 379 __ jmp(&return_label_);
358 } else { 380 } else {
359 __ bind(&return_label_); 381 __ bind(&return_label_);
360 if (FLAG_trace) { 382 if (FLAG_trace) {
361 __ push(rax); 383 __ push(rax);
362 __ CallRuntime(Runtime::kTraceExit, 1); 384 __ CallRuntime(Runtime::kTraceExit, 1);
363 } 385 }
386 if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
387 // Pretend that the exit is a backwards jump to the entry.
388 int weight = 1;
389 if (info_->ShouldSelfOptimize()) {
390 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
391 } else if (FLAG_weighted_back_edges) {
392 int distance = masm_->pc_offset();
393 weight = Min(kMaxBackEdgeWeight,
394 Max(1, distance / kBackEdgeDistanceDivisor));
395 }
396 EmitProfilingCounterDecrement(weight);
397 Label ok;
398 __ j(positive, &ok, Label::kNear);
399 __ push(rax);
400 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
401 __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
402 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
403 } else {
404 InterruptStub stub;
405 __ CallStub(&stub);
406 }
407 __ pop(rax);
408 EmitProfilingCounterReset();
409 __ bind(&ok);
410 }
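
The same budget is charged on the way out of the function: the exit is treated as a back edge whose distance is the function's code size (masm_->pc_offset()), except that a self-optimization candidate uses FLAG_interrupt_budget / FLAG_self_opt_count as the weight, so the budget runs out after roughly FLAG_self_opt_count returns. For example, assuming an interrupt budget of 10000 and a self_opt_count of 100 (illustrative values, not the actual flag defaults), every return subtracts 100, and the interrupt, or the direct Runtime::kOptimizeFunctionOnNextCall path when FLAG_direct_self_opt is set, is reached after about 100 calls.
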
364 #ifdef DEBUG 411 #ifdef DEBUG
365 // Add a label for checking the size of the code used for returning. 412 // Add a label for checking the size of the code used for returning.
366 Label check_exit_codesize; 413 Label check_exit_codesize;
367 masm_->bind(&check_exit_codesize); 414 masm_->bind(&check_exit_codesize);
368 #endif 415 #endif
369 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); 416 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
370 __ RecordJSReturn(); 417 __ RecordJSReturn();
371 // Do not use the leave instruction here because it is too short to 418 // Do not use the leave instruction here because it is too short to
372 // patch with the code required by the debugger. 419 // patch with the code required by the debugger.
373 __ movq(rsp, rbp); 420 __ movq(rsp, rbp);
(...skipping 475 matching lines...)
849 __ cmpq(rdx, rax); 896 __ cmpq(rdx, rax);
850 __ j(not_equal, &next_test); 897 __ j(not_equal, &next_test);
851 __ Drop(1); // Switch value is no longer needed. 898 __ Drop(1); // Switch value is no longer needed.
852 __ jmp(clause->body_target()); 899 __ jmp(clause->body_target());
853 __ bind(&slow_case); 900 __ bind(&slow_case);
854 } 901 }
855 902
856 // Record position before stub call for type feedback. 903 // Record position before stub call for type feedback.
857 SetSourcePosition(clause->position()); 904 SetSourcePosition(clause->position());
858 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); 905 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
859 __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId()); 906 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
860 patch_site.EmitPatchInfo(); 907 patch_site.EmitPatchInfo();
861 908
862 __ testq(rax, rax); 909 __ testq(rax, rax);
863 __ j(not_equal, &next_test); 910 __ j(not_equal, &next_test);
864 __ Drop(1); // Switch value is no longer needed. 911 __ Drop(1); // Switch value is no longer needed.
865 __ jmp(clause->body_target()); 912 __ jmp(clause->body_target());
866 } 913 }
867 914
868 // Discard the test value and jump to the default if present, otherwise to 915 // Discard the test value and jump to the default if present, otherwise to
869 // the end of the statement. 916 // the end of the statement.
(...skipping 278 matching lines...)
1148 } 1195 }
1149 1196
1150 // All extension objects were empty and it is safe to use a global 1197 // All extension objects were empty and it is safe to use a global
1151 // load IC call. 1198 // load IC call.
1152 __ movq(rax, GlobalObjectOperand()); 1199 __ movq(rax, GlobalObjectOperand());
1153 __ Move(rcx, var->name()); 1200 __ Move(rcx, var->name());
1154 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1201 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1155 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1202 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1156 ? RelocInfo::CODE_TARGET 1203 ? RelocInfo::CODE_TARGET
1157 : RelocInfo::CODE_TARGET_CONTEXT; 1204 : RelocInfo::CODE_TARGET_CONTEXT;
1158 __ call(ic, mode); 1205 CallIC(ic, mode);
1159 } 1206 }
1160 1207
1161 1208
1162 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1209 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1163 Label* slow) { 1210 Label* slow) {
1164 ASSERT(var->IsContextSlot()); 1211 ASSERT(var->IsContextSlot());
1165 Register context = rsi; 1212 Register context = rsi;
1166 Register temp = rbx; 1213 Register temp = rbx;
1167 1214
1168 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { 1215 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
(...skipping 60 matching lines...)
1229 // Three cases: global variables, lookup variables, and all other types of 1276 // Three cases: global variables, lookup variables, and all other types of
1230 // variables. 1277 // variables.
1231 switch (var->location()) { 1278 switch (var->location()) {
1232 case Variable::UNALLOCATED: { 1279 case Variable::UNALLOCATED: {
1233 Comment cmnt(masm_, "Global variable"); 1280 Comment cmnt(masm_, "Global variable");
1234 // Use inline caching. Variable name is passed in rcx and the global 1281 // Use inline caching. Variable name is passed in rcx and the global
1235 // object on the stack. 1282 // object on the stack.
1236 __ Move(rcx, var->name()); 1283 __ Move(rcx, var->name());
1237 __ movq(rax, GlobalObjectOperand()); 1284 __ movq(rax, GlobalObjectOperand());
1238 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1285 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1239 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1286 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1240 context()->Plug(rax); 1287 context()->Plug(rax);
1241 break; 1288 break;
1242 } 1289 }
1243 1290
1244 case Variable::PARAMETER: 1291 case Variable::PARAMETER:
1245 case Variable::LOCAL: 1292 case Variable::LOCAL:
1246 case Variable::CONTEXT: { 1293 case Variable::CONTEXT: {
1247 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot"); 1294 Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1248 if (var->binding_needs_init()) { 1295 if (var->binding_needs_init()) {
1249 // var->scope() may be NULL when the proxy is located in eval code and 1296 // var->scope() may be NULL when the proxy is located in eval code and
(...skipping 189 matching lines...)
1439 // Fall through. 1486 // Fall through.
1440 case ObjectLiteral::Property::COMPUTED: 1487 case ObjectLiteral::Property::COMPUTED:
1441 if (key->handle()->IsSymbol()) { 1488 if (key->handle()->IsSymbol()) {
1442 if (property->emit_store()) { 1489 if (property->emit_store()) {
1443 VisitForAccumulatorValue(value); 1490 VisitForAccumulatorValue(value);
1444 __ Move(rcx, key->handle()); 1491 __ Move(rcx, key->handle());
1445 __ movq(rdx, Operand(rsp, 0)); 1492 __ movq(rdx, Operand(rsp, 0));
1446 Handle<Code> ic = is_classic_mode() 1493 Handle<Code> ic = is_classic_mode()
1447 ? isolate()->builtins()->StoreIC_Initialize() 1494 ? isolate()->builtins()->StoreIC_Initialize()
1448 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1495 : isolate()->builtins()->StoreIC_Initialize_Strict();
1449 __ call(ic, RelocInfo::CODE_TARGET, key->id()); 1496 CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1450 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1497 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1451 } else { 1498 } else {
1452 VisitForEffect(value); 1499 VisitForEffect(value);
1453 } 1500 }
1454 break; 1501 break;
1455 } 1502 }
1456 // Fall through. 1503 // Fall through.
1457 case ObjectLiteral::Property::PROTOTYPE: 1504 case ObjectLiteral::Property::PROTOTYPE:
1458 __ push(Operand(rsp, 0)); // Duplicate receiver. 1505 __ push(Operand(rsp, 0)); // Duplicate receiver.
1459 VisitForStackValue(key); 1506 VisitForStackValue(key);
(...skipping 249 matching lines...)
1709 break; 1756 break;
1710 } 1757 }
1711 } 1758 }
1712 1759
1713 1760
1714 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 1761 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1715 SetSourcePosition(prop->position()); 1762 SetSourcePosition(prop->position());
1716 Literal* key = prop->key()->AsLiteral(); 1763 Literal* key = prop->key()->AsLiteral();
1717 __ Move(rcx, key->handle()); 1764 __ Move(rcx, key->handle());
1718 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1765 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1719 __ call(ic, RelocInfo::CODE_TARGET, prop->id()); 1766 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1720 } 1767 }
1721 1768
1722 1769
1723 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 1770 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1724 SetSourcePosition(prop->position()); 1771 SetSourcePosition(prop->position());
1725 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 1772 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1726 __ call(ic, RelocInfo::CODE_TARGET, prop->id()); 1773 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1727 } 1774 }
1728 1775
1729 1776
1730 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1777 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1731 Token::Value op, 1778 Token::Value op,
1732 OverwriteMode mode, 1779 OverwriteMode mode,
1733 Expression* left, 1780 Expression* left,
1734 Expression* right) { 1781 Expression* right) {
1735 // Do combined smi check of the operands. Left operand is on the 1782 // Do combined smi check of the operands. Left operand is on the
1736 // stack (popped into rdx). Right operand is in rax but moved into 1783 // stack (popped into rdx). Right operand is in rax but moved into
1737 // rcx to make the shifts easier. 1784 // rcx to make the shifts easier.
1738 Label done, stub_call, smi_case; 1785 Label done, stub_call, smi_case;
1739 __ pop(rdx); 1786 __ pop(rdx);
1740 __ movq(rcx, rax); 1787 __ movq(rcx, rax);
1741 __ or_(rax, rdx); 1788 __ or_(rax, rdx);
1742 JumpPatchSite patch_site(masm_); 1789 JumpPatchSite patch_site(masm_);
1743 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); 1790 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1744 1791
1745 __ bind(&stub_call); 1792 __ bind(&stub_call);
1746 __ movq(rax, rcx); 1793 __ movq(rax, rcx);
1747 BinaryOpStub stub(op, mode); 1794 BinaryOpStub stub(op, mode);
1748 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 1795 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1749 patch_site.EmitPatchInfo(); 1796 patch_site.EmitPatchInfo();
1750 __ jmp(&done, Label::kNear); 1797 __ jmp(&done, Label::kNear);
1751 1798
1752 __ bind(&smi_case); 1799 __ bind(&smi_case);
1753 switch (op) { 1800 switch (op) {
1754 case Token::SAR: 1801 case Token::SAR:
1755 __ SmiShiftArithmeticRight(rax, rdx, rcx); 1802 __ SmiShiftArithmeticRight(rax, rdx, rcx);
1756 break; 1803 break;
1757 case Token::SHL: 1804 case Token::SHL:
1758 __ SmiShiftLeft(rax, rdx, rcx); 1805 __ SmiShiftLeft(rax, rdx, rcx);
(...skipping 28 matching lines...)
1787 context()->Plug(rax); 1834 context()->Plug(rax);
1788 } 1835 }
1789 1836
1790 1837
1791 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 1838 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1792 Token::Value op, 1839 Token::Value op,
1793 OverwriteMode mode) { 1840 OverwriteMode mode) {
1794 __ pop(rdx); 1841 __ pop(rdx);
1795 BinaryOpStub stub(op, mode); 1842 BinaryOpStub stub(op, mode);
1796 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 1843 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1797 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 1844 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1798 patch_site.EmitPatchInfo(); 1845 patch_site.EmitPatchInfo();
1799 context()->Plug(rax); 1846 context()->Plug(rax);
1800 } 1847 }
1801 1848
1802 1849
1803 void FullCodeGenerator::EmitAssignment(Expression* expr) { 1850 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1804 // Invalid left-hand sides are rewritten to have a 'throw 1851 // Invalid left-hand sides are rewritten to have a 'throw
1805 // ReferenceError' on the left-hand side. 1852 // ReferenceError' on the left-hand side.
1806 if (!expr->IsValidLeftHandSide()) { 1853 if (!expr->IsValidLeftHandSide()) {
1807 VisitForEffect(expr); 1854 VisitForEffect(expr);
(...skipping 20 matching lines...)
1828 } 1875 }
1829 case NAMED_PROPERTY: { 1876 case NAMED_PROPERTY: {
1830 __ push(rax); // Preserve value. 1877 __ push(rax); // Preserve value.
1831 VisitForAccumulatorValue(prop->obj()); 1878 VisitForAccumulatorValue(prop->obj());
1832 __ movq(rdx, rax); 1879 __ movq(rdx, rax);
1833 __ pop(rax); // Restore value. 1880 __ pop(rax); // Restore value.
1834 __ Move(rcx, prop->key()->AsLiteral()->handle()); 1881 __ Move(rcx, prop->key()->AsLiteral()->handle());
1835 Handle<Code> ic = is_classic_mode() 1882 Handle<Code> ic = is_classic_mode()
1836 ? isolate()->builtins()->StoreIC_Initialize() 1883 ? isolate()->builtins()->StoreIC_Initialize()
1837 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1884 : isolate()->builtins()->StoreIC_Initialize_Strict();
1838 __ call(ic); 1885 CallIC(ic);
1839 break; 1886 break;
1840 } 1887 }
1841 case KEYED_PROPERTY: { 1888 case KEYED_PROPERTY: {
1842 __ push(rax); // Preserve value. 1889 __ push(rax); // Preserve value.
1843 VisitForStackValue(prop->obj()); 1890 VisitForStackValue(prop->obj());
1844 VisitForAccumulatorValue(prop->key()); 1891 VisitForAccumulatorValue(prop->key());
1845 __ movq(rcx, rax); 1892 __ movq(rcx, rax);
1846 __ pop(rdx); 1893 __ pop(rdx);
1847 __ pop(rax); // Restore value. 1894 __ pop(rax); // Restore value.
1848 Handle<Code> ic = is_classic_mode() 1895 Handle<Code> ic = is_classic_mode()
1849 ? isolate()->builtins()->KeyedStoreIC_Initialize() 1896 ? isolate()->builtins()->KeyedStoreIC_Initialize()
1850 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 1897 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1851 __ call(ic); 1898 CallIC(ic);
1852 break; 1899 break;
1853 } 1900 }
1854 } 1901 }
1855 context()->Plug(rax); 1902 context()->Plug(rax);
1856 } 1903 }
1857 1904
1858 1905
1859 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1906 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1860 Token::Value op) { 1907 Token::Value op) {
1861 if (var->IsUnallocated()) { 1908 if (var->IsUnallocated()) {
1862 // Global var, const, or let. 1909 // Global var, const, or let.
1863 __ Move(rcx, var->name()); 1910 __ Move(rcx, var->name());
1864 __ movq(rdx, GlobalObjectOperand()); 1911 __ movq(rdx, GlobalObjectOperand());
1865 Handle<Code> ic = is_classic_mode() 1912 Handle<Code> ic = is_classic_mode()
1866 ? isolate()->builtins()->StoreIC_Initialize() 1913 ? isolate()->builtins()->StoreIC_Initialize()
1867 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1914 : isolate()->builtins()->StoreIC_Initialize_Strict();
1868 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1915 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1869 } else if (op == Token::INIT_CONST) { 1916 } else if (op == Token::INIT_CONST) {
1870 // Const initializers need a write barrier. 1917 // Const initializers need a write barrier.
1871 ASSERT(!var->IsParameter()); // No const parameters. 1918 ASSERT(!var->IsParameter()); // No const parameters.
1872 if (var->IsStackLocal()) { 1919 if (var->IsStackLocal()) {
1873 Label skip; 1920 Label skip;
1874 __ movq(rdx, StackOperand(var)); 1921 __ movq(rdx, StackOperand(var));
1875 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 1922 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1876 __ j(not_equal, &skip); 1923 __ j(not_equal, &skip);
1877 __ movq(StackOperand(var), rax); 1924 __ movq(StackOperand(var), rax);
1878 __ bind(&skip); 1925 __ bind(&skip);
(...skipping 87 matching lines...)
1966 SetSourcePosition(expr->position()); 2013 SetSourcePosition(expr->position());
1967 __ Move(rcx, prop->key()->AsLiteral()->handle()); 2014 __ Move(rcx, prop->key()->AsLiteral()->handle());
1968 if (expr->ends_initialization_block()) { 2015 if (expr->ends_initialization_block()) {
1969 __ movq(rdx, Operand(rsp, 0)); 2016 __ movq(rdx, Operand(rsp, 0));
1970 } else { 2017 } else {
1971 __ pop(rdx); 2018 __ pop(rdx);
1972 } 2019 }
1973 Handle<Code> ic = is_classic_mode() 2020 Handle<Code> ic = is_classic_mode()
1974 ? isolate()->builtins()->StoreIC_Initialize() 2021 ? isolate()->builtins()->StoreIC_Initialize()
1975 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2022 : isolate()->builtins()->StoreIC_Initialize_Strict();
1976 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 2023 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
1977 2024
1978 // If the assignment ends an initialization block, revert to fast case. 2025 // If the assignment ends an initialization block, revert to fast case.
1979 if (expr->ends_initialization_block()) { 2026 if (expr->ends_initialization_block()) {
1980 __ push(rax); // Result of assignment, saved even if not needed. 2027 __ push(rax); // Result of assignment, saved even if not needed.
1981 __ push(Operand(rsp, kPointerSize)); // Receiver is under value. 2028 __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
1982 __ CallRuntime(Runtime::kToFastProperties, 1); 2029 __ CallRuntime(Runtime::kToFastProperties, 1);
1983 __ pop(rax); 2030 __ pop(rax);
1984 __ Drop(1); 2031 __ Drop(1);
1985 } 2032 }
1986 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2033 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
(...skipping 19 matching lines...)
2006 if (expr->ends_initialization_block()) { 2053 if (expr->ends_initialization_block()) {
2007 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later. 2054 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
2008 } else { 2055 } else {
2009 __ pop(rdx); 2056 __ pop(rdx);
2010 } 2057 }
2011 // Record source code position before IC call. 2058 // Record source code position before IC call.
2012 SetSourcePosition(expr->position()); 2059 SetSourcePosition(expr->position());
2013 Handle<Code> ic = is_classic_mode() 2060 Handle<Code> ic = is_classic_mode()
2014 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2061 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2015 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2062 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2016 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 2063 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2017 2064
2018 // If the assignment ends an initialization block, revert to fast case. 2065 // If the assignment ends an initialization block, revert to fast case.
2019 if (expr->ends_initialization_block()) { 2066 if (expr->ends_initialization_block()) {
2020 __ pop(rdx); 2067 __ pop(rdx);
2021 __ push(rax); // Result of assignment, saved even if not needed. 2068 __ push(rax); // Result of assignment, saved even if not needed.
2022 __ push(rdx); 2069 __ push(rdx);
2023 __ CallRuntime(Runtime::kToFastProperties, 1); 2070 __ CallRuntime(Runtime::kToFastProperties, 1);
2024 __ pop(rax); 2071 __ pop(rax);
2025 } 2072 }
2026 2073
(...skipping 13 matching lines...)
2040 } else { 2087 } else {
2041 VisitForStackValue(expr->obj()); 2088 VisitForStackValue(expr->obj());
2042 VisitForAccumulatorValue(expr->key()); 2089 VisitForAccumulatorValue(expr->key());
2043 __ pop(rdx); 2090 __ pop(rdx);
2044 EmitKeyedPropertyLoad(expr); 2091 EmitKeyedPropertyLoad(expr);
2045 context()->Plug(rax); 2092 context()->Plug(rax);
2046 } 2093 }
2047 } 2094 }
2048 2095
2049 2096
2097 void FullCodeGenerator::CallIC(Handle<Code> code,
2098 RelocInfo::Mode rmode,
2099 unsigned ast_id) {
2100 ic_total_count_++;
2101 __ call(code, rmode, ast_id);
2102 }
2103
2104
2050 void FullCodeGenerator::EmitCallWithIC(Call* expr, 2105 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2051 Handle<Object> name, 2106 Handle<Object> name,
2052 RelocInfo::Mode mode) { 2107 RelocInfo::Mode mode) {
2053 // Code common for calls using the IC. 2108 // Code common for calls using the IC.
2054 ZoneList<Expression*>* args = expr->arguments(); 2109 ZoneList<Expression*>* args = expr->arguments();
2055 int arg_count = args->length(); 2110 int arg_count = args->length();
2056 { PreservePositionScope scope(masm()->positions_recorder()); 2111 { PreservePositionScope scope(masm()->positions_recorder());
2057 for (int i = 0; i < arg_count; i++) { 2112 for (int i = 0; i < arg_count; i++) {
2058 VisitForStackValue(args->at(i)); 2113 VisitForStackValue(args->at(i));
2059 } 2114 }
2060 __ Move(rcx, name); 2115 __ Move(rcx, name);
2061 } 2116 }
2062 // Record source position for debugger. 2117 // Record source position for debugger.
2063 SetSourcePosition(expr->position()); 2118 SetSourcePosition(expr->position());
2064 // Call the IC initialization code. 2119 // Call the IC initialization code.
2065 Handle<Code> ic = 2120 Handle<Code> ic =
2066 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 2121 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2067 __ call(ic, mode, expr->id()); 2122 CallIC(ic, mode, expr->id());
2068 RecordJSReturnSite(expr); 2123 RecordJSReturnSite(expr);
2069 // Restore context register. 2124 // Restore context register.
2070 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2125 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2071 context()->Plug(rax); 2126 context()->Plug(rax);
2072 } 2127 }
2073 2128
2074 2129
2075 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2130 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2076 Expression* key) { 2131 Expression* key) {
2077 // Load the key. 2132 // Load the key.
(...skipping 12 matching lines...)
2090 for (int i = 0; i < arg_count; i++) { 2145 for (int i = 0; i < arg_count; i++) {
2091 VisitForStackValue(args->at(i)); 2146 VisitForStackValue(args->at(i));
2092 } 2147 }
2093 } 2148 }
2094 // Record source position for debugger. 2149 // Record source position for debugger.
2095 SetSourcePosition(expr->position()); 2150 SetSourcePosition(expr->position());
2096 // Call the IC initialization code. 2151 // Call the IC initialization code.
2097 Handle<Code> ic = 2152 Handle<Code> ic =
2098 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); 2153 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2099 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key. 2154 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2100 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 2155 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2101 RecordJSReturnSite(expr); 2156 RecordJSReturnSite(expr);
2102 // Restore context register. 2157 // Restore context register.
2103 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2158 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2104 context()->DropAndPlug(1, rax); // Drop the key still on the stack. 2159 context()->DropAndPlug(1, rax); // Drop the key still on the stack.
2105 } 2160 }
2106 2161
2107 2162
2108 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { 2163 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2109 // Code common for calls using the call stub. 2164 // Code common for calls using the call stub.
2110 ZoneList<Expression*>* args = expr->arguments(); 2165 ZoneList<Expression*>* args = expr->arguments();
(...skipping 1619 matching lines...)
3730 for (int i = 0; i < arg_count; i++) { 3785 for (int i = 0; i < arg_count; i++) {
3731 VisitForStackValue(args->at(i)); 3786 VisitForStackValue(args->at(i));
3732 } 3787 }
3733 3788
3734 if (expr->is_jsruntime()) { 3789 if (expr->is_jsruntime()) {
3735 // Call the JS runtime function using a call IC. 3790 // Call the JS runtime function using a call IC.
3736 __ Move(rcx, expr->name()); 3791 __ Move(rcx, expr->name());
3737 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 3792 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3738 Handle<Code> ic = 3793 Handle<Code> ic =
3739 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 3794 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3740 __ call(ic, mode, expr->id()); 3795 CallIC(ic, mode, expr->id());
3741 // Restore context register. 3796 // Restore context register.
3742 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 3797 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3743 } else { 3798 } else {
3744 __ CallRuntime(expr->function(), arg_count); 3799 __ CallRuntime(expr->function(), arg_count);
3745 } 3800 }
3746 context()->Plug(rax); 3801 context()->Plug(rax);
3747 } 3802 }
3748 3803
3749 3804
3750 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 3805 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
(...skipping 137 matching lines...)
3888 // TODO(svenpanne): Allowing format strings in Comment would be nice here... 3943 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3889 Comment cmt(masm_, comment); 3944 Comment cmt(masm_, comment);
3890 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 3945 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3891 UnaryOverwriteMode overwrite = 3946 UnaryOverwriteMode overwrite =
3892 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 3947 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3893 UnaryOpStub stub(expr->op(), overwrite); 3948 UnaryOpStub stub(expr->op(), overwrite);
3894 // UnaryOpStub expects the argument to be in the 3949 // UnaryOpStub expects the argument to be in the
3895 // accumulator register rax. 3950 // accumulator register rax.
3896 VisitForAccumulatorValue(expr->expression()); 3951 VisitForAccumulatorValue(expr->expression());
3897 SetSourcePosition(expr->position()); 3952 SetSourcePosition(expr->position());
3898 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 3953 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3899 context()->Plug(rax); 3954 context()->Plug(rax);
3900 } 3955 }
3901 3956
3902 3957
3903 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 3958 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3904 Comment cmnt(masm_, "[ CountOperation"); 3959 Comment cmnt(masm_, "[ CountOperation");
3905 SetSourcePosition(expr->position()); 3960 SetSourcePosition(expr->position());
3906 3961
3907 // Invalid left-hand-sides are rewritten to have a 'throw 3962 // Invalid left-hand-sides are rewritten to have a 'throw
3908 // ReferenceError' as the left-hand side. 3963 // ReferenceError' as the left-hand side.
(...skipping 100 matching lines...)
4009 SetSourcePosition(expr->position()); 4064 SetSourcePosition(expr->position());
4010 4065
4011 // Call stub for +1/-1. 4066 // Call stub for +1/-1.
4012 BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE); 4067 BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
4013 if (expr->op() == Token::INC) { 4068 if (expr->op() == Token::INC) {
4014 __ Move(rdx, Smi::FromInt(1)); 4069 __ Move(rdx, Smi::FromInt(1));
4015 } else { 4070 } else {
4016 __ movq(rdx, rax); 4071 __ movq(rdx, rax);
4017 __ Move(rax, Smi::FromInt(1)); 4072 __ Move(rax, Smi::FromInt(1));
4018 } 4073 }
4019 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId()); 4074 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4020 patch_site.EmitPatchInfo(); 4075 patch_site.EmitPatchInfo();
4021 __ bind(&done); 4076 __ bind(&done);
4022 4077
4023 // Store the value returned in rax. 4078 // Store the value returned in rax.
4024 switch (assign_type) { 4079 switch (assign_type) {
4025 case VARIABLE: 4080 case VARIABLE:
4026 if (expr->is_postfix()) { 4081 if (expr->is_postfix()) {
4027 // Perform the assignment as if via '='. 4082 // Perform the assignment as if via '='.
4028 { EffectContext context(this); 4083 { EffectContext context(this);
4029 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4084 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
(...skipping 13 matching lines...)
4043 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4098 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4044 context()->Plug(rax); 4099 context()->Plug(rax);
4045 } 4100 }
4046 break; 4101 break;
4047 case NAMED_PROPERTY: { 4102 case NAMED_PROPERTY: {
4048 __ Move(rcx, prop->key()->AsLiteral()->handle()); 4103 __ Move(rcx, prop->key()->AsLiteral()->handle());
4049 __ pop(rdx); 4104 __ pop(rdx);
4050 Handle<Code> ic = is_classic_mode() 4105 Handle<Code> ic = is_classic_mode()
4051 ? isolate()->builtins()->StoreIC_Initialize() 4106 ? isolate()->builtins()->StoreIC_Initialize()
4052 : isolate()->builtins()->StoreIC_Initialize_Strict(); 4107 : isolate()->builtins()->StoreIC_Initialize_Strict();
4053 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 4108 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4054 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4109 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4055 if (expr->is_postfix()) { 4110 if (expr->is_postfix()) {
4056 if (!context()->IsEffect()) { 4111 if (!context()->IsEffect()) {
4057 context()->PlugTOS(); 4112 context()->PlugTOS();
4058 } 4113 }
4059 } else { 4114 } else {
4060 context()->Plug(rax); 4115 context()->Plug(rax);
4061 } 4116 }
4062 break; 4117 break;
4063 } 4118 }
4064 case KEYED_PROPERTY: { 4119 case KEYED_PROPERTY: {
4065 __ pop(rcx); 4120 __ pop(rcx);
4066 __ pop(rdx); 4121 __ pop(rdx);
4067 Handle<Code> ic = is_classic_mode() 4122 Handle<Code> ic = is_classic_mode()
4068 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4123 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4069 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4124 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4070 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 4125 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4071 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4126 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4072 if (expr->is_postfix()) { 4127 if (expr->is_postfix()) {
4073 if (!context()->IsEffect()) { 4128 if (!context()->IsEffect()) {
4074 context()->PlugTOS(); 4129 context()->PlugTOS();
4075 } 4130 }
4076 } else { 4131 } else {
4077 context()->Plug(rax); 4132 context()->Plug(rax);
4078 } 4133 }
4079 break; 4134 break;
4080 } 4135 }
4081 } 4136 }
4082 } 4137 }
4083 4138
4084 4139
4085 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4140 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4086 VariableProxy* proxy = expr->AsVariableProxy(); 4141 VariableProxy* proxy = expr->AsVariableProxy();
4087 ASSERT(!context()->IsEffect()); 4142 ASSERT(!context()->IsEffect());
4088 ASSERT(!context()->IsTest()); 4143 ASSERT(!context()->IsTest());
4089 4144
4090 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4145 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4091 Comment cmnt(masm_, "Global variable"); 4146 Comment cmnt(masm_, "Global variable");
4092 __ Move(rcx, proxy->name()); 4147 __ Move(rcx, proxy->name());
4093 __ movq(rax, GlobalObjectOperand()); 4148 __ movq(rax, GlobalObjectOperand());
4094 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 4149 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4095 // Use a regular load, not a contextual load, to avoid a reference 4150 // Use a regular load, not a contextual load, to avoid a reference
4096 // error. 4151 // error.
4097 __ call(ic); 4152 CallIC(ic);
4098 PrepareForBailout(expr, TOS_REG); 4153 PrepareForBailout(expr, TOS_REG);
4099 context()->Plug(rax); 4154 context()->Plug(rax);
4100 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4155 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4101 Label done, slow; 4156 Label done, slow;
4102 4157
4103 // Generate code for loading from variables potentially shadowed 4158 // Generate code for loading from variables potentially shadowed
4104 // by eval-introduced variables. 4159 // by eval-introduced variables.
4105 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4160 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4106 4161
4107 __ bind(&slow); 4162 __ bind(&slow);
(...skipping 159 matching lines...)
4267 __ or_(rcx, rax); 4322 __ or_(rcx, rax);
4268 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); 4323 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4269 __ cmpq(rdx, rax); 4324 __ cmpq(rdx, rax);
4270 Split(cc, if_true, if_false, NULL); 4325 Split(cc, if_true, if_false, NULL);
4271 __ bind(&slow_case); 4326 __ bind(&slow_case);
4272 } 4327 }
4273 4328
4274 // Record position and call the compare IC. 4329 // Record position and call the compare IC.
4275 SetSourcePosition(expr->position()); 4330 SetSourcePosition(expr->position());
4276 Handle<Code> ic = CompareIC::GetUninitialized(op); 4331 Handle<Code> ic = CompareIC::GetUninitialized(op);
4277 __ call(ic, RelocInfo::CODE_TARGET, expr->id()); 4332 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4278 patch_site.EmitPatchInfo(); 4333 patch_site.EmitPatchInfo();
4279 4334
4280 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4335 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4281 __ testq(rax, rax); 4336 __ testq(rax, rax);
4282 Split(cc, if_true, if_false, fall_through); 4337 Split(cc, if_true, if_false, fall_through);
4283 } 4338 }
4284 } 4339 }
4285 4340
4286 // Convert the result of the comparison into one expected for this 4341 // Convert the result of the comparison into one expected for this
4287 // expression's context. 4342 // expression's context.
(...skipping 141 matching lines...)
4429 *context_length = 0; 4484 *context_length = 0;
4430 return previous_; 4485 return previous_;
4431 } 4486 }
4432 4487
4433 4488
4434 #undef __ 4489 #undef __
4435 4490
4436 } } // namespace v8::internal 4491 } } // namespace v8::internal
4437 4492
4438 #endif // V8_TARGET_ARCH_X64 4493 #endif // V8_TARGET_ARCH_X64