Chromium Code Reviews

Side by Side Diff: src/arm/full-codegen-arm.cc

Issue 9837004: Port count-based profiler to ARM (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: addressed comments (created 8 years, 9 months ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 16 matching lines...)
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_ARM) 30 #if defined(V8_TARGET_ARCH_ARM)
31 31
32 #include "code-stubs.h" 32 #include "code-stubs.h"
33 #include "codegen.h" 33 #include "codegen.h"
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "isolate-inl.h"
37 #include "parser.h" 38 #include "parser.h"
38 #include "scopes.h" 39 #include "scopes.h"
39 #include "stub-cache.h" 40 #include "stub-cache.h"
40 41
41 #include "arm/code-stubs-arm.h" 42 #include "arm/code-stubs-arm.h"
42 #include "arm/macro-assembler-arm.h" 43 #include "arm/macro-assembler-arm.h"
43 44
44 namespace v8 { 45 namespace v8 {
45 namespace internal { 46 namespace internal {
46 47
(...skipping 55 matching lines...)
102 103
103 private: 104 private:
104 MacroAssembler* masm_; 105 MacroAssembler* masm_;
105 Label patch_site_; 106 Label patch_site_;
106 #ifdef DEBUG 107 #ifdef DEBUG
107 bool info_emitted_; 108 bool info_emitted_;
108 #endif 109 #endif
109 }; 110 };
110 111
111 112
113 // TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
112 int FullCodeGenerator::self_optimization_header_size() { 114 int FullCodeGenerator::self_optimization_header_size() {
115 UNREACHABLE();
113 return 24; 116 return 24;
114 } 117 }
115 118
116 119
117 // Generate code for a JS function. On entry to the function the receiver 120 // Generate code for a JS function. On entry to the function the receiver
118 // and arguments have been pushed on the stack left to right. The actual 121 // and arguments have been pushed on the stack left to right. The actual
119 // argument count matches the formal parameter count expected by the 122 // argument count matches the formal parameter count expected by the
120 // function. 123 // function.
121 // 124 //
122 // The live registers are: 125 // The live registers are:
123 // o r1: the JS function object being called (i.e., ourselves) 126 // o r1: the JS function object being called (i.e., ourselves)
124 // o cp: our context 127 // o cp: our context
125 // o fp: our caller's frame pointer 128 // o fp: our caller's frame pointer
126 // o sp: stack pointer 129 // o sp: stack pointer
127 // o lr: return address 130 // o lr: return address
128 // 131 //
129 // The function builds a JS frame. Please see JavaScriptFrameConstants in 132 // The function builds a JS frame. Please see JavaScriptFrameConstants in
130 // frames-arm.h for its layout. 133 // frames-arm.h for its layout.
131 void FullCodeGenerator::Generate() { 134 void FullCodeGenerator::Generate() {
132 CompilationInfo* info = info_; 135 CompilationInfo* info = info_;
133 handler_table_ = 136 handler_table_ =
134 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); 137 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
138 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
139 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
135 SetFunctionPosition(function()); 140 SetFunctionPosition(function());
136 Comment cmnt(masm_, "[ function compiled by full code generator"); 141 Comment cmnt(masm_, "[ function compiled by full code generator");
137 142
138 // We can optionally optimize based on counters rather than statistical
139 // sampling.
140 if (info->ShouldSelfOptimize()) {
141 if (FLAG_trace_opt_verbose) {
142 PrintF("[adding self-optimization header to %s]\n",
143 *info->function()->debug_name()->ToCString());
144 }
145 has_self_optimization_header_ = true;
146 MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
147 Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
148 JSGlobalPropertyCell* cell;
149 if (maybe_cell->To(&cell)) {
150 __ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell)));
151 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
152 __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
153 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
154 Handle<Code> compile_stub(
155 isolate()->builtins()->builtin(Builtins::kLazyRecompile));
156 __ Jump(compile_stub, RelocInfo::CODE_TARGET, eq);
157 ASSERT(masm_->pc_offset() == self_optimization_header_size());
158 }
159 }
160
161 #ifdef DEBUG 143 #ifdef DEBUG
162 if (strlen(FLAG_stop_at) > 0 && 144 if (strlen(FLAG_stop_at) > 0 &&
163 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { 145 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
164 __ stop("stop-at"); 146 __ stop("stop-at");
165 } 147 }
166 #endif 148 #endif
167 149
168 // Strict mode functions and builtins need to replace the receiver 150 // Strict mode functions and builtins need to replace the receiver
169 // with undefined when called as functions (without an explicit 151 // with undefined when called as functions (without an explicit
170 // receiver object). r5 is zero for method calls and non-zero for 152 // receiver object). r5 is zero for method calls and non-zero for
(...skipping 158 matching lines...)
329 // of the stack check table. 311 // of the stack check table.
330 masm()->CheckConstPool(true, false); 312 masm()->CheckConstPool(true, false);
331 } 313 }
332 314
333 315
334 void FullCodeGenerator::ClearAccumulator() { 316 void FullCodeGenerator::ClearAccumulator() {
335 __ mov(r0, Operand(Smi::FromInt(0))); 317 __ mov(r0, Operand(Smi::FromInt(0)));
336 } 318 }
337 319
338 320
321 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
322 __ mov(r2, Operand(profiling_counter_));
323 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
324 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
325 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
326 }
327
328
329 void FullCodeGenerator::EmitProfilingCounterReset() {
330 int reset_value = FLAG_interrupt_budget;
331 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
332 // Self-optimization is a one-off thing: if it fails, don't try again.
333 reset_value = Smi::kMaxValue;
334 }
335 if (isolate()->IsDebuggerActive()) {
336 // Detect debug break requests as soon as possible.
337 reset_value = 10;
338 }
339 __ mov(r2, Operand(profiling_counter_));
340 __ mov(r3, Operand(Smi::FromInt(reset_value)));
341 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
342 }
343
344
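The two helpers above are the core of the ported profiler: the per-function budget lives as a Smi in a JSGlobalPropertyCell (profiling_counter_), the decrement helper subtracts a weight with SetCC so the sign of the result can be branched on, and the reset helper refills the budget (using a much smaller value while the debugger is active, or Smi::kMaxValue to effectively disable retries after a failed one-off self-optimization). A minimal C++ sketch of that bookkeeping, ignoring Smi tagging and the emitted assembly; the ProfilingBudget type and its members are illustrative only, not V8 API:

  // Simplified model of what EmitProfilingCounterDecrement/Reset emit.
  // ProfilingBudget and its member names are hypothetical.
  struct ProfilingBudget {
    int value;  // stands in for the Smi stored in the JSGlobalPropertyCell

    // Mirrors the emitted "sub ..., SetCC": subtract the weight and report
    // whether the budget went negative, i.e. an interrupt is now due.
    bool DecrementAndCheck(int weight) {
      value -= weight;
      return value < 0;
    }

    // Mirrors EmitProfilingCounterReset, minus the self-optimization case:
    // refill the budget, using a small value while the debugger is active so
    // debug break requests are noticed quickly.
    void Reset(int interrupt_budget, bool debugger_active) {
      value = debugger_active ? 10 : interrupt_budget;
    }
  };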
345 static const int kMaxBackEdgeWeight = 127;
346 static const int kBackEdgeDistanceDivisor = 142;
347
348
339 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, 349 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
340 Label* back_edge_target) { 350 Label* back_edge_target) {
341 Comment cmnt(masm_, "[ Stack check"); 351 Comment cmnt(masm_, "[ Stack check");
342 Label ok; 352 Label ok;
343 __ LoadRoot(ip, Heap::kStackLimitRootIndex); 353
344 __ cmp(sp, Operand(ip)); 354 if (FLAG_count_based_interrupts) {
345 __ b(hs, &ok); 355 int weight = 1;
346 StackCheckStub stub; 356 if (FLAG_weighted_back_edges) {
347 __ CallStub(&stub); 357 ASSERT(back_edge_target->is_bound());
358 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
359 weight = Min(kMaxBackEdgeWeight,
360 Max(1, distance / kBackEdgeDistanceDivisor));
361 }
362 EmitProfilingCounterDecrement(weight);
363 __ b(pl, &ok);
364 InterruptStub stub;
365 __ CallStub(&stub);
366 } else {
367 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
368 __ cmp(sp, Operand(ip));
369 __ b(hs, &ok);
370 StackCheckStub stub;
371 __ CallStub(&stub);
372 }
373
348 // Record a mapping of this PC offset to the OSR id. This is used to find 374 // Record a mapping of this PC offset to the OSR id. This is used to find
349 // the AST id from the unoptimized code in order to use it as a key into 375 // the AST id from the unoptimized code in order to use it as a key into
350 // the deoptimization input data found in the optimized code. 376 // the deoptimization input data found in the optimized code.
351 RecordStackCheck(stmt->OsrEntryId()); 377 RecordStackCheck(stmt->OsrEntryId());
352 378
379 if (FLAG_count_based_interrupts) {
380 EmitProfilingCounterReset();
381 }
382
353 __ bind(&ok); 383 __ bind(&ok);
354 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 384 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
355 // Record a mapping of the OSR id to this PC. This is used if the OSR 385 // Record a mapping of the OSR id to this PC. This is used if the OSR
356 // entry becomes the target of a bailout. We don't expect it to be, but 386 // entry becomes the target of a bailout. We don't expect it to be, but
357 // we want it to work if it is. 387 // we want it to work if it is.
358 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); 388 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
359 } 389 }
360 390
361 391
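EmitStackCheck now has two modes: with FLAG_count_based_interrupts it decrements the profiling counter at every loop back edge and calls InterruptStub once the budget runs out; otherwise it keeps the old stack-limit check. With FLAG_weighted_back_edges the decrement is weighted by the amount of code generated since the back-edge target, clamped to [1, kMaxBackEdgeWeight]. A standalone sketch of that weight computation (the function name is hypothetical):

  // weight = Min(kMaxBackEdgeWeight, Max(1, distance / kBackEdgeDistanceDivisor))
  #include <algorithm>

  int BackEdgeWeight(int distance_in_bytes) {
    const int kMaxBackEdgeWeight = 127;
    const int kBackEdgeDistanceDivisor = 142;
    // Bigger loop bodies burn more of the interrupt budget per iteration,
    // clamped to the range [1, 127].
    return std::min(kMaxBackEdgeWeight,
                    std::max(1, distance_in_bytes / kBackEdgeDistanceDivisor));
  }

For example, a loop whose body spans 1420 bytes of generated code costs a weight of 10 on every back edge, so it exhausts the budget roughly ten times sooner than a tiny loop with weight 1.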
362 void FullCodeGenerator::EmitReturnSequence() { 392 void FullCodeGenerator::EmitReturnSequence() {
363 Comment cmnt(masm_, "[ Return sequence"); 393 Comment cmnt(masm_, "[ Return sequence");
364 if (return_label_.is_bound()) { 394 if (return_label_.is_bound()) {
365 __ b(&return_label_); 395 __ b(&return_label_);
366 } else { 396 } else {
367 __ bind(&return_label_); 397 __ bind(&return_label_);
368 if (FLAG_trace) { 398 if (FLAG_trace) {
369 // Push the return value on the stack as the parameter. 399 // Push the return value on the stack as the parameter.
370 // Runtime::TraceExit returns its parameter in r0. 400 // Runtime::TraceExit returns its parameter in r0.
371 __ push(r0); 401 __ push(r0);
372 __ CallRuntime(Runtime::kTraceExit, 1); 402 __ CallRuntime(Runtime::kTraceExit, 1);
373 } 403 }
404 if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
405 // Pretend that the exit is a backwards jump to the entry.
406 int weight = 1;
407 if (info_->ShouldSelfOptimize()) {
408 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
409 } else if (FLAG_weighted_back_edges) {
410 int distance = masm_->pc_offset();
411 weight = Min(kMaxBackEdgeWeight,
412 Max(1, distance / kBackEdgeDistanceDivisor));
413 }
414 EmitProfilingCounterDecrement(weight);
415 Label ok;
416 __ b(pl, &ok);
417 __ push(r0);
418 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
419 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
420 __ push(r2);
421 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
422 } else {
423 InterruptStub stub;
424 __ CallStub(&stub);
425 }
426 __ pop(r0);
427 EmitProfilingCounterReset();
428 __ bind(&ok);
429 }
374 430
375 #ifdef DEBUG 431 #ifdef DEBUG
376 // Add a label for checking the size of the code used for returning. 432 // Add a label for checking the size of the code used for returning.
377 Label check_exit_codesize; 433 Label check_exit_codesize;
378 masm_->bind(&check_exit_codesize); 434 masm_->bind(&check_exit_codesize);
379 #endif 435 #endif
380 // Make sure that the constant pool is not emitted inside of the return 436 // Make sure that the constant pool is not emitted inside of the return
381 // sequence. 437 // sequence.
382 { Assembler::BlockConstPoolScope block_const_pool(masm_); 438 { Assembler::BlockConstPoolScope block_const_pool(masm_);
383 // Here we use masm_-> instead of the __ macro to avoid the code coverage 439 // Here we use masm_-> instead of the __ macro to avoid the code coverage
(...skipping 497 matching lines...)
881 __ cmp(r1, r0); 937 __ cmp(r1, r0);
882 __ b(ne, &next_test); 938 __ b(ne, &next_test);
883 __ Drop(1); // Switch value is no longer needed. 939 __ Drop(1); // Switch value is no longer needed.
884 __ b(clause->body_target()); 940 __ b(clause->body_target());
885 __ bind(&slow_case); 941 __ bind(&slow_case);
886 } 942 }
887 943
888 // Record position before stub call for type feedback. 944 // Record position before stub call for type feedback.
889 SetSourcePosition(clause->position()); 945 SetSourcePosition(clause->position());
890 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); 946 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
891 __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId()); 947 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
892 patch_site.EmitPatchInfo(); 948 patch_site.EmitPatchInfo();
893 949
894 __ cmp(r0, Operand(0)); 950 __ cmp(r0, Operand(0));
895 __ b(ne, &next_test); 951 __ b(ne, &next_test);
896 __ Drop(1); // Switch value is no longer needed. 952 __ Drop(1); // Switch value is no longer needed.
897 __ b(clause->body_target()); 953 __ b(clause->body_target());
898 } 954 }
899 955
900 // Discard the test value and jump to the default if present, otherwise to 956 // Discard the test value and jump to the default if present, otherwise to
901 // the end of the statement. 957 // the end of the statement.
(...skipping 277 matching lines...)
1179 __ b(&loop); 1235 __ b(&loop);
1180 __ bind(&fast); 1236 __ bind(&fast);
1181 } 1237 }
1182 1238
1183 __ ldr(r0, GlobalObjectOperand()); 1239 __ ldr(r0, GlobalObjectOperand());
1184 __ mov(r2, Operand(var->name())); 1240 __ mov(r2, Operand(var->name()));
1185 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF) 1241 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1186 ? RelocInfo::CODE_TARGET 1242 ? RelocInfo::CODE_TARGET
1187 : RelocInfo::CODE_TARGET_CONTEXT; 1243 : RelocInfo::CODE_TARGET_CONTEXT;
1188 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1244 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1189 __ Call(ic, mode); 1245 CallIC(ic, mode);
1190 } 1246 }
1191 1247
1192 1248
1193 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, 1249 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1194 Label* slow) { 1250 Label* slow) {
1195 ASSERT(var->IsContextSlot()); 1251 ASSERT(var->IsContextSlot());
1196 Register context = cp; 1252 Register context = cp;
1197 Register next = r3; 1253 Register next = r3;
1198 Register temp = r4; 1254 Register temp = r4;
1199 1255
(...skipping 63 matching lines...)
1263 // Three cases: global variables, lookup variables, and all other types of 1319 // Three cases: global variables, lookup variables, and all other types of
1264 // variables. 1320 // variables.
1265 switch (var->location()) { 1321 switch (var->location()) {
1266 case Variable::UNALLOCATED: { 1322 case Variable::UNALLOCATED: {
1267 Comment cmnt(masm_, "Global variable"); 1323 Comment cmnt(masm_, "Global variable");
1268 // Use inline caching. Variable name is passed in r2 and the global 1324 // Use inline caching. Variable name is passed in r2 and the global
1269 // object (receiver) in r0. 1325 // object (receiver) in r0.
1270 __ ldr(r0, GlobalObjectOperand()); 1326 __ ldr(r0, GlobalObjectOperand());
1271 __ mov(r2, Operand(var->name())); 1327 __ mov(r2, Operand(var->name()));
1272 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1328 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1273 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1329 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1274 context()->Plug(r0); 1330 context()->Plug(r0);
1275 break; 1331 break;
1276 } 1332 }
1277 1333
1278 case Variable::PARAMETER: 1334 case Variable::PARAMETER:
1279 case Variable::LOCAL: 1335 case Variable::LOCAL:
1280 case Variable::CONTEXT: { 1336 case Variable::CONTEXT: {
1281 Comment cmnt(masm_, var->IsContextSlot() 1337 Comment cmnt(masm_, var->IsContextSlot()
1282 ? "Context variable" 1338 ? "Context variable"
1283 : "Stack variable"); 1339 : "Stack variable");
(...skipping 190 matching lines...)
1474 // Fall through. 1530 // Fall through.
1475 case ObjectLiteral::Property::COMPUTED: 1531 case ObjectLiteral::Property::COMPUTED:
1476 if (key->handle()->IsSymbol()) { 1532 if (key->handle()->IsSymbol()) {
1477 if (property->emit_store()) { 1533 if (property->emit_store()) {
1478 VisitForAccumulatorValue(value); 1534 VisitForAccumulatorValue(value);
1479 __ mov(r2, Operand(key->handle())); 1535 __ mov(r2, Operand(key->handle()));
1480 __ ldr(r1, MemOperand(sp)); 1536 __ ldr(r1, MemOperand(sp));
1481 Handle<Code> ic = is_classic_mode() 1537 Handle<Code> ic = is_classic_mode()
1482 ? isolate()->builtins()->StoreIC_Initialize() 1538 ? isolate()->builtins()->StoreIC_Initialize()
1483 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1539 : isolate()->builtins()->StoreIC_Initialize_Strict();
1484 __ Call(ic, RelocInfo::CODE_TARGET, key->id()); 1540 CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1485 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1541 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1486 } else { 1542 } else {
1487 VisitForEffect(value); 1543 VisitForEffect(value);
1488 } 1544 }
1489 break; 1545 break;
1490 } 1546 }
1491 // Fall through. 1547 // Fall through.
1492 case ObjectLiteral::Property::PROTOTYPE: 1548 case ObjectLiteral::Property::PROTOTYPE:
1493 // Duplicate receiver on stack. 1549 // Duplicate receiver on stack.
1494 __ ldr(r0, MemOperand(sp)); 1550 __ ldr(r0, MemOperand(sp));
(...skipping 247 matching lines...)
1742 } 1798 }
1743 } 1799 }
1744 1800
1745 1801
1746 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 1802 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1747 SetSourcePosition(prop->position()); 1803 SetSourcePosition(prop->position());
1748 Literal* key = prop->key()->AsLiteral(); 1804 Literal* key = prop->key()->AsLiteral();
1749 __ mov(r2, Operand(key->handle())); 1805 __ mov(r2, Operand(key->handle()));
1750 // Call load IC. It has arguments receiver and property name r0 and r2. 1806 // Call load IC. It has arguments receiver and property name r0 and r2.
1751 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 1807 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1752 __ Call(ic, RelocInfo::CODE_TARGET, prop->id()); 1808 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1753 } 1809 }
1754 1810
1755 1811
1756 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 1812 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1757 SetSourcePosition(prop->position()); 1813 SetSourcePosition(prop->position());
1758 // Call keyed load IC. It has arguments key and receiver in r0 and r1. 1814 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1759 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 1815 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1760 __ Call(ic, RelocInfo::CODE_TARGET, prop->id()); 1816 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1761 } 1817 }
1762 1818
1763 1819
1764 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1820 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1765 Token::Value op, 1821 Token::Value op,
1766 OverwriteMode mode, 1822 OverwriteMode mode,
1767 Expression* left_expr, 1823 Expression* left_expr,
1768 Expression* right_expr) { 1824 Expression* right_expr) {
1769 Label done, smi_case, stub_call; 1825 Label done, smi_case, stub_call;
1770 1826
1771 Register scratch1 = r2; 1827 Register scratch1 = r2;
1772 Register scratch2 = r3; 1828 Register scratch2 = r3;
1773 1829
1774 // Get the arguments. 1830 // Get the arguments.
1775 Register left = r1; 1831 Register left = r1;
1776 Register right = r0; 1832 Register right = r0;
1777 __ pop(left); 1833 __ pop(left);
1778 1834
1779 // Perform combined smi check on both operands. 1835 // Perform combined smi check on both operands.
1780 __ orr(scratch1, left, Operand(right)); 1836 __ orr(scratch1, left, Operand(right));
1781 STATIC_ASSERT(kSmiTag == 0); 1837 STATIC_ASSERT(kSmiTag == 0);
1782 JumpPatchSite patch_site(masm_); 1838 JumpPatchSite patch_site(masm_);
1783 patch_site.EmitJumpIfSmi(scratch1, &smi_case); 1839 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1784 1840
1785 __ bind(&stub_call); 1841 __ bind(&stub_call);
1786 BinaryOpStub stub(op, mode); 1842 BinaryOpStub stub(op, mode);
1787 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 1843 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1788 patch_site.EmitPatchInfo(); 1844 patch_site.EmitPatchInfo();
1789 __ jmp(&done); 1845 __ jmp(&done);
1790 1846
1791 __ bind(&smi_case); 1847 __ bind(&smi_case);
1792 // Smi case. This code works the same way as the smi-smi case in the type 1848 // Smi case. This code works the same way as the smi-smi case in the type
1793 // recording binary operation stub, see 1849 // recording binary operation stub, see
1794 // BinaryOpStub::GenerateSmiSmiOperation for comments. 1850 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1795 switch (op) { 1851 switch (op) {
1796 case Token::SAR: 1852 case Token::SAR:
1797 __ b(&stub_call); 1853 __ b(&stub_call);
(...skipping 62 matching lines...)
1860 context()->Plug(r0); 1916 context()->Plug(r0);
1861 } 1917 }
1862 1918
1863 1919
1864 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, 1920 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1865 Token::Value op, 1921 Token::Value op,
1866 OverwriteMode mode) { 1922 OverwriteMode mode) {
1867 __ pop(r1); 1923 __ pop(r1);
1868 BinaryOpStub stub(op, mode); 1924 BinaryOpStub stub(op, mode);
1869 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 1925 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1870 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 1926 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1871 patch_site.EmitPatchInfo(); 1927 patch_site.EmitPatchInfo();
1872 context()->Plug(r0); 1928 context()->Plug(r0);
1873 } 1929 }
1874 1930
1875 1931
1876 void FullCodeGenerator::EmitAssignment(Expression* expr) { 1932 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1877 // Invalid left-hand sides are rewritten to have a 'throw 1933 // Invalid left-hand sides are rewritten to have a 'throw
1878 // ReferenceError' on the left-hand side. 1934 // ReferenceError' on the left-hand side.
1879 if (!expr->IsValidLeftHandSide()) { 1935 if (!expr->IsValidLeftHandSide()) {
1880 VisitForEffect(expr); 1936 VisitForEffect(expr);
(...skipping 20 matching lines...)
1901 } 1957 }
1902 case NAMED_PROPERTY: { 1958 case NAMED_PROPERTY: {
1903 __ push(r0); // Preserve value. 1959 __ push(r0); // Preserve value.
1904 VisitForAccumulatorValue(prop->obj()); 1960 VisitForAccumulatorValue(prop->obj());
1905 __ mov(r1, r0); 1961 __ mov(r1, r0);
1906 __ pop(r0); // Restore value. 1962 __ pop(r0); // Restore value.
1907 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 1963 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1908 Handle<Code> ic = is_classic_mode() 1964 Handle<Code> ic = is_classic_mode()
1909 ? isolate()->builtins()->StoreIC_Initialize() 1965 ? isolate()->builtins()->StoreIC_Initialize()
1910 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1966 : isolate()->builtins()->StoreIC_Initialize_Strict();
1911 __ Call(ic); 1967 CallIC(ic);
1912 break; 1968 break;
1913 } 1969 }
1914 case KEYED_PROPERTY: { 1970 case KEYED_PROPERTY: {
1915 __ push(r0); // Preserve value. 1971 __ push(r0); // Preserve value.
1916 VisitForStackValue(prop->obj()); 1972 VisitForStackValue(prop->obj());
1917 VisitForAccumulatorValue(prop->key()); 1973 VisitForAccumulatorValue(prop->key());
1918 __ mov(r1, r0); 1974 __ mov(r1, r0);
1919 __ pop(r2); 1975 __ pop(r2);
1920 __ pop(r0); // Restore value. 1976 __ pop(r0); // Restore value.
1921 Handle<Code> ic = is_classic_mode() 1977 Handle<Code> ic = is_classic_mode()
1922 ? isolate()->builtins()->KeyedStoreIC_Initialize() 1978 ? isolate()->builtins()->KeyedStoreIC_Initialize()
1923 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 1979 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1924 __ Call(ic); 1980 CallIC(ic);
1925 break; 1981 break;
1926 } 1982 }
1927 } 1983 }
1928 context()->Plug(r0); 1984 context()->Plug(r0);
1929 } 1985 }
1930 1986
1931 1987
1932 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 1988 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1933 Token::Value op) { 1989 Token::Value op) {
1934 if (var->IsUnallocated()) { 1990 if (var->IsUnallocated()) {
1935 // Global var, const, or let. 1991 // Global var, const, or let.
1936 __ mov(r2, Operand(var->name())); 1992 __ mov(r2, Operand(var->name()));
1937 __ ldr(r1, GlobalObjectOperand()); 1993 __ ldr(r1, GlobalObjectOperand());
1938 Handle<Code> ic = is_classic_mode() 1994 Handle<Code> ic = is_classic_mode()
1939 ? isolate()->builtins()->StoreIC_Initialize() 1995 ? isolate()->builtins()->StoreIC_Initialize()
1940 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1996 : isolate()->builtins()->StoreIC_Initialize_Strict();
1941 __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); 1997 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1942 1998
1943 } else if (op == Token::INIT_CONST) { 1999 } else if (op == Token::INIT_CONST) {
1944 // Const initializers need a write barrier. 2000 // Const initializers need a write barrier.
1945 ASSERT(!var->IsParameter()); // No const parameters. 2001 ASSERT(!var->IsParameter()); // No const parameters.
1946 if (var->IsStackLocal()) { 2002 if (var->IsStackLocal()) {
1947 Label skip; 2003 Label skip;
1948 __ ldr(r1, StackOperand(var)); 2004 __ ldr(r1, StackOperand(var));
1949 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); 2005 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
1950 __ b(ne, &skip); 2006 __ b(ne, &skip);
1951 __ str(result_register(), StackOperand(var)); 2007 __ str(result_register(), StackOperand(var));
(...skipping 97 matching lines...)
2049 // receiver into fast case. 2105 // receiver into fast case.
2050 if (expr->ends_initialization_block()) { 2106 if (expr->ends_initialization_block()) {
2051 __ ldr(r1, MemOperand(sp)); 2107 __ ldr(r1, MemOperand(sp));
2052 } else { 2108 } else {
2053 __ pop(r1); 2109 __ pop(r1);
2054 } 2110 }
2055 2111
2056 Handle<Code> ic = is_classic_mode() 2112 Handle<Code> ic = is_classic_mode()
2057 ? isolate()->builtins()->StoreIC_Initialize() 2113 ? isolate()->builtins()->StoreIC_Initialize()
2058 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2114 : isolate()->builtins()->StoreIC_Initialize_Strict();
2059 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 2115 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2060 2116
2061 // If the assignment ends an initialization block, revert to fast case. 2117 // If the assignment ends an initialization block, revert to fast case.
2062 if (expr->ends_initialization_block()) { 2118 if (expr->ends_initialization_block()) {
2063 __ push(r0); // Result of assignment, saved even if not needed. 2119 __ push(r0); // Result of assignment, saved even if not needed.
2064 // Receiver is under the result value. 2120 // Receiver is under the result value.
2065 __ ldr(ip, MemOperand(sp, kPointerSize)); 2121 __ ldr(ip, MemOperand(sp, kPointerSize));
2066 __ push(ip); 2122 __ push(ip);
2067 __ CallRuntime(Runtime::kToFastProperties, 1); 2123 __ CallRuntime(Runtime::kToFastProperties, 1);
2068 __ pop(r0); 2124 __ pop(r0);
2069 __ Drop(1); 2125 __ Drop(1);
(...skipping 25 matching lines...)
2095 // receiver into fast case. 2151 // receiver into fast case.
2096 if (expr->ends_initialization_block()) { 2152 if (expr->ends_initialization_block()) {
2097 __ ldr(r2, MemOperand(sp)); 2153 __ ldr(r2, MemOperand(sp));
2098 } else { 2154 } else {
2099 __ pop(r2); 2155 __ pop(r2);
2100 } 2156 }
2101 2157
2102 Handle<Code> ic = is_classic_mode() 2158 Handle<Code> ic = is_classic_mode()
2103 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2159 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2104 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2160 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2105 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 2161 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2106 2162
2107 // If the assignment ends an initialization block, revert to fast case. 2163 // If the assignment ends an initialization block, revert to fast case.
2108 if (expr->ends_initialization_block()) { 2164 if (expr->ends_initialization_block()) {
2109 __ push(r0); // Result of assignment, saved even if not needed. 2165 __ push(r0); // Result of assignment, saved even if not needed.
2110 // Receiver is under the result value. 2166 // Receiver is under the result value.
2111 __ ldr(ip, MemOperand(sp, kPointerSize)); 2167 __ ldr(ip, MemOperand(sp, kPointerSize));
2112 __ push(ip); 2168 __ push(ip);
2113 __ CallRuntime(Runtime::kToFastProperties, 1); 2169 __ CallRuntime(Runtime::kToFastProperties, 1);
2114 __ pop(r0); 2170 __ pop(r0);
2115 __ Drop(1); 2171 __ Drop(1);
(...skipping 13 matching lines...)
2129 context()->Plug(r0); 2185 context()->Plug(r0);
2130 } else { 2186 } else {
2131 VisitForStackValue(expr->obj()); 2187 VisitForStackValue(expr->obj());
2132 VisitForAccumulatorValue(expr->key()); 2188 VisitForAccumulatorValue(expr->key());
2133 __ pop(r1); 2189 __ pop(r1);
2134 EmitKeyedPropertyLoad(expr); 2190 EmitKeyedPropertyLoad(expr);
2135 context()->Plug(r0); 2191 context()->Plug(r0);
2136 } 2192 }
2137 } 2193 }
2138 2194
2195
2196 void FullCodeGenerator::CallIC(Handle<Code> code,
2197 RelocInfo::Mode rmode,
2198 unsigned ast_id) {
2199 ic_total_count_++;
2200 __ Call(code, rmode, ast_id);
2201 }
2202
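The new CallIC helper is the reason every "__ Call(ic, ...)" call site in this file is rewritten to "CallIC(ic, ...)": all IC and patchable stub calls are funneled through one wrapper that bumps ic_total_count_ before emitting the call, so the generated code carries a total count of IC sites (presumably for the optimizer's type-feedback heuristics). A hedged, simplified model of that counting-wrapper pattern; names below are illustrative, not V8 API:

  // Every IC call site goes through a single counting wrapper.
  class CodeGenModel {
   public:
    void CallIC(int code_handle) {
      ++ic_total_count_;       // count the IC call site
      EmitCall(code_handle);   // then emit the actual call
    }
    int ic_total_count() const { return ic_total_count_; }

   private:
    void EmitCall(int /*code_handle*/) { /* would emit the machine call */ }
    int ic_total_count_ = 0;
  };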
2139 void FullCodeGenerator::EmitCallWithIC(Call* expr, 2203 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2140 Handle<Object> name, 2204 Handle<Object> name,
2141 RelocInfo::Mode mode) { 2205 RelocInfo::Mode mode) {
2142 // Code common for calls using the IC. 2206 // Code common for calls using the IC.
2143 ZoneList<Expression*>* args = expr->arguments(); 2207 ZoneList<Expression*>* args = expr->arguments();
2144 int arg_count = args->length(); 2208 int arg_count = args->length();
2145 { PreservePositionScope scope(masm()->positions_recorder()); 2209 { PreservePositionScope scope(masm()->positions_recorder());
2146 for (int i = 0; i < arg_count; i++) { 2210 for (int i = 0; i < arg_count; i++) {
2147 VisitForStackValue(args->at(i)); 2211 VisitForStackValue(args->at(i));
2148 } 2212 }
2149 __ mov(r2, Operand(name)); 2213 __ mov(r2, Operand(name));
2150 } 2214 }
2151 // Record source position for debugger. 2215 // Record source position for debugger.
2152 SetSourcePosition(expr->position()); 2216 SetSourcePosition(expr->position());
2153 // Call the IC initialization code. 2217 // Call the IC initialization code.
2154 Handle<Code> ic = 2218 Handle<Code> ic =
2155 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 2219 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2156 __ Call(ic, mode, expr->id()); 2220 CallIC(ic, mode, expr->id());
2157 RecordJSReturnSite(expr); 2221 RecordJSReturnSite(expr);
2158 // Restore context register. 2222 // Restore context register.
2159 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2223 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2160 context()->Plug(r0); 2224 context()->Plug(r0);
2161 } 2225 }
2162 2226
2163 2227
2164 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2228 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2165 Expression* key) { 2229 Expression* key) {
2166 // Load the key. 2230 // Load the key.
(...skipping 12 matching lines...)
2179 for (int i = 0; i < arg_count; i++) { 2243 for (int i = 0; i < arg_count; i++) {
2180 VisitForStackValue(args->at(i)); 2244 VisitForStackValue(args->at(i));
2181 } 2245 }
2182 } 2246 }
2183 // Record source position for debugger. 2247 // Record source position for debugger.
2184 SetSourcePosition(expr->position()); 2248 SetSourcePosition(expr->position());
2185 // Call the IC initialization code. 2249 // Call the IC initialization code.
2186 Handle<Code> ic = 2250 Handle<Code> ic =
2187 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); 2251 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2188 __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. 2252 __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2189 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 2253 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2190 RecordJSReturnSite(expr); 2254 RecordJSReturnSite(expr);
2191 // Restore context register. 2255 // Restore context register.
2192 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2256 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2193 context()->DropAndPlug(1, r0); // Drop the key still on the stack. 2257 context()->DropAndPlug(1, r0); // Drop the key still on the stack.
2194 } 2258 }
2195 2259
2196 2260
2197 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { 2261 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2198 // Code common for calls using the call stub. 2262 // Code common for calls using the call stub.
2199 ZoneList<Expression*>* args = expr->arguments(); 2263 ZoneList<Expression*>* args = expr->arguments();
(...skipping 1576 matching lines...)
3776 for (int i = 0; i < arg_count; i++) { 3840 for (int i = 0; i < arg_count; i++) {
3777 VisitForStackValue(args->at(i)); 3841 VisitForStackValue(args->at(i));
3778 } 3842 }
3779 3843
3780 if (expr->is_jsruntime()) { 3844 if (expr->is_jsruntime()) {
3781 // Call the JS runtime function. 3845 // Call the JS runtime function.
3782 __ mov(r2, Operand(expr->name())); 3846 __ mov(r2, Operand(expr->name()));
3783 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 3847 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3784 Handle<Code> ic = 3848 Handle<Code> ic =
3785 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 3849 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3786 __ Call(ic, mode, expr->id()); 3850 CallIC(ic, mode, expr->id());
3787 // Restore context register. 3851 // Restore context register.
3788 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 3852 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3789 } else { 3853 } else {
3790 // Call the C runtime function. 3854 // Call the C runtime function.
3791 __ CallRuntime(expr->function(), arg_count); 3855 __ CallRuntime(expr->function(), arg_count);
3792 } 3856 }
3793 context()->Plug(r0); 3857 context()->Plug(r0);
3794 } 3858 }
3795 3859
3796 3860
(...skipping 134 matching lines...)
3931 // TODO(svenpanne): Allowing format strings in Comment would be nice here... 3995 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3932 Comment cmt(masm_, comment); 3996 Comment cmt(masm_, comment);
3933 bool can_overwrite = expr->expression()->ResultOverwriteAllowed(); 3997 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3934 UnaryOverwriteMode overwrite = 3998 UnaryOverwriteMode overwrite =
3935 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE; 3999 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3936 UnaryOpStub stub(expr->op(), overwrite); 4000 UnaryOpStub stub(expr->op(), overwrite);
3937 // UnaryOpStub expects the argument to be in the 4001 // UnaryOpStub expects the argument to be in the
3938 // accumulator register r0. 4002 // accumulator register r0.
3939 VisitForAccumulatorValue(expr->expression()); 4003 VisitForAccumulatorValue(expr->expression());
3940 SetSourcePosition(expr->position()); 4004 SetSourcePosition(expr->position());
3941 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id()); 4005 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3942 context()->Plug(r0); 4006 context()->Plug(r0);
3943 } 4007 }
3944 4008
3945 4009
3946 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 4010 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3947 Comment cmnt(masm_, "[ CountOperation"); 4011 Comment cmnt(masm_, "[ CountOperation");
3948 SetSourcePosition(expr->position()); 4012 SetSourcePosition(expr->position());
3949 4013
3950 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 4014 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3951 // as the left-hand side. 4015 // as the left-hand side.
(...skipping 90 matching lines...)
4042 __ bind(&stub_call); 4106 __ bind(&stub_call);
4043 // Call stub. Undo operation first. 4107 // Call stub. Undo operation first.
4044 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); 4108 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4045 } 4109 }
4046 __ mov(r1, Operand(Smi::FromInt(count_value))); 4110 __ mov(r1, Operand(Smi::FromInt(count_value)));
4047 4111
4048 // Record position before stub call. 4112 // Record position before stub call.
4049 SetSourcePosition(expr->position()); 4113 SetSourcePosition(expr->position());
4050 4114
4051 BinaryOpStub stub(Token::ADD, NO_OVERWRITE); 4115 BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4052 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId()); 4116 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4053 patch_site.EmitPatchInfo(); 4117 patch_site.EmitPatchInfo();
4054 __ bind(&done); 4118 __ bind(&done);
4055 4119
4056 // Store the value returned in r0. 4120 // Store the value returned in r0.
4057 switch (assign_type) { 4121 switch (assign_type) {
4058 case VARIABLE: 4122 case VARIABLE:
4059 if (expr->is_postfix()) { 4123 if (expr->is_postfix()) {
4060 { EffectContext context(this); 4124 { EffectContext context(this);
4061 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 4125 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4062 Token::ASSIGN); 4126 Token::ASSIGN);
(...skipping 11 matching lines...)
4074 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4138 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4075 context()->Plug(r0); 4139 context()->Plug(r0);
4076 } 4140 }
4077 break; 4141 break;
4078 case NAMED_PROPERTY: { 4142 case NAMED_PROPERTY: {
4079 __ mov(r2, Operand(prop->key()->AsLiteral()->handle())); 4143 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
4080 __ pop(r1); 4144 __ pop(r1);
4081 Handle<Code> ic = is_classic_mode() 4145 Handle<Code> ic = is_classic_mode()
4082 ? isolate()->builtins()->StoreIC_Initialize() 4146 ? isolate()->builtins()->StoreIC_Initialize()
4083 : isolate()->builtins()->StoreIC_Initialize_Strict(); 4147 : isolate()->builtins()->StoreIC_Initialize_Strict();
4084 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 4148 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4085 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4149 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4086 if (expr->is_postfix()) { 4150 if (expr->is_postfix()) {
4087 if (!context()->IsEffect()) { 4151 if (!context()->IsEffect()) {
4088 context()->PlugTOS(); 4152 context()->PlugTOS();
4089 } 4153 }
4090 } else { 4154 } else {
4091 context()->Plug(r0); 4155 context()->Plug(r0);
4092 } 4156 }
4093 break; 4157 break;
4094 } 4158 }
4095 case KEYED_PROPERTY: { 4159 case KEYED_PROPERTY: {
4096 __ pop(r1); // Key. 4160 __ pop(r1); // Key.
4097 __ pop(r2); // Receiver. 4161 __ pop(r2); // Receiver.
4098 Handle<Code> ic = is_classic_mode() 4162 Handle<Code> ic = is_classic_mode()
4099 ? isolate()->builtins()->KeyedStoreIC_Initialize() 4163 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4100 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4164 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4101 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 4165 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4102 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4166 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4103 if (expr->is_postfix()) { 4167 if (expr->is_postfix()) {
4104 if (!context()->IsEffect()) { 4168 if (!context()->IsEffect()) {
4105 context()->PlugTOS(); 4169 context()->PlugTOS();
4106 } 4170 }
4107 } else { 4171 } else {
4108 context()->Plug(r0); 4172 context()->Plug(r0);
4109 } 4173 }
4110 break; 4174 break;
4111 } 4175 }
4112 } 4176 }
4113 } 4177 }
4114 4178
4115 4179
4116 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4180 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4117 ASSERT(!context()->IsEffect()); 4181 ASSERT(!context()->IsEffect());
4118 ASSERT(!context()->IsTest()); 4182 ASSERT(!context()->IsTest());
4119 VariableProxy* proxy = expr->AsVariableProxy(); 4183 VariableProxy* proxy = expr->AsVariableProxy();
4120 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4184 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4121 Comment cmnt(masm_, "Global variable"); 4185 Comment cmnt(masm_, "Global variable");
4122 __ ldr(r0, GlobalObjectOperand()); 4186 __ ldr(r0, GlobalObjectOperand());
4123 __ mov(r2, Operand(proxy->name())); 4187 __ mov(r2, Operand(proxy->name()));
4124 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 4188 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4125 // Use a regular load, not a contextual load, to avoid a reference 4189 // Use a regular load, not a contextual load, to avoid a reference
4126 // error. 4190 // error.
4127 __ Call(ic); 4191 CallIC(ic);
4128 PrepareForBailout(expr, TOS_REG); 4192 PrepareForBailout(expr, TOS_REG);
4129 context()->Plug(r0); 4193 context()->Plug(r0);
4130 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4194 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4131 Label done, slow; 4195 Label done, slow;
4132 4196
4133 // Generate code for loading from variables potentially shadowed 4197 // Generate code for loading from variables potentially shadowed
4134 // by eval-introduced variables. 4198 // by eval-introduced variables.
4135 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4199 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4136 4200
4137 __ bind(&slow); 4201 __ bind(&slow);
(...skipping 162 matching lines...)
4300 __ orr(r2, r0, Operand(r1)); 4364 __ orr(r2, r0, Operand(r1));
4301 patch_site.EmitJumpIfNotSmi(r2, &slow_case); 4365 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4302 __ cmp(r1, r0); 4366 __ cmp(r1, r0);
4303 Split(cond, if_true, if_false, NULL); 4367 Split(cond, if_true, if_false, NULL);
4304 __ bind(&slow_case); 4368 __ bind(&slow_case);
4305 } 4369 }
4306 4370
4307 // Record position and call the compare IC. 4371 // Record position and call the compare IC.
4308 SetSourcePosition(expr->position()); 4372 SetSourcePosition(expr->position());
4309 Handle<Code> ic = CompareIC::GetUninitialized(op); 4373 Handle<Code> ic = CompareIC::GetUninitialized(op);
4310 __ Call(ic, RelocInfo::CODE_TARGET, expr->id()); 4374 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4311 patch_site.EmitPatchInfo(); 4375 patch_site.EmitPatchInfo();
4312 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4376 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4313 __ cmp(r0, Operand(0)); 4377 __ cmp(r0, Operand(0));
4314 Split(cond, if_true, if_false, fall_through); 4378 Split(cond, if_true, if_false, fall_through);
4315 } 4379 }
4316 } 4380 }
4317 4381
4318 // Convert the result of the comparison into one expected for this 4382 // Convert the result of the comparison into one expected for this
4319 // expression's context. 4383 // expression's context.
4320 context()->Plug(if_true, if_false); 4384 context()->Plug(if_true, if_false);
(...skipping 141 matching lines...)
4462 *context_length = 0; 4526 *context_length = 0;
4463 return previous_; 4527 return previous_;
4464 } 4528 }
4465 4529
4466 4530
4467 #undef __ 4531 #undef __
4468 4532
4469 } } // namespace v8::internal 4533 } } // namespace v8::internal
4470 4534
4471 #endif // V8_TARGET_ARCH_ARM 4535 #endif // V8_TARGET_ARCH_ARM