| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 16 matching lines...) |
| 27 | 27 | 
| 28 #include "v8.h" | 28 #include "v8.h" | 
| 29 | 29 | 
| 30 #if defined(V8_TARGET_ARCH_IA32) | 30 #if defined(V8_TARGET_ARCH_IA32) | 
| 31 | 31 | 
| 32 #include "code-stubs.h" | 32 #include "code-stubs.h" | 
| 33 #include "codegen.h" | 33 #include "codegen.h" | 
| 34 #include "compiler.h" | 34 #include "compiler.h" | 
| 35 #include "debug.h" | 35 #include "debug.h" | 
| 36 #include "full-codegen.h" | 36 #include "full-codegen.h" | 
|  | 37 #include "isolate-inl.h" | 
| 37 #include "parser.h" | 38 #include "parser.h" | 
| 38 #include "scopes.h" | 39 #include "scopes.h" | 
| 39 #include "stub-cache.h" | 40 #include "stub-cache.h" | 
| 40 | 41 | 
| 41 namespace v8 { | 42 namespace v8 { | 
| 42 namespace internal { | 43 namespace internal { | 
| 43 | 44 | 
| 44 #define __ ACCESS_MASM(masm_) | 45 #define __ ACCESS_MASM(masm_) | 
| 45 | 46 | 
| 46 | 47 | 
| (...skipping 46 matching lines...) |
| 93   } | 94   } | 
| 94 | 95 | 
| 95   MacroAssembler* masm_; | 96   MacroAssembler* masm_; | 
| 96   Label patch_site_; | 97   Label patch_site_; | 
| 97 #ifdef DEBUG | 98 #ifdef DEBUG | 
| 98   bool info_emitted_; | 99   bool info_emitted_; | 
| 99 #endif | 100 #endif | 
| 100 }; | 101 }; | 
| 101 | 102 | 
| 102 | 103 | 
|  | 104 // TODO(jkummerow): Obsolete as soon as x64 is updated. Remove. | 
| 103 int FullCodeGenerator::self_optimization_header_size() { | 105 int FullCodeGenerator::self_optimization_header_size() { | 
|  | 106   UNREACHABLE(); | 
| 104   return 13; | 107   return 13; | 
| 105 } | 108 } | 
| 106 | 109 | 
| 107 | 110 | 
| 108 // Generate code for a JS function.  On entry to the function the receiver | 111 // Generate code for a JS function.  On entry to the function the receiver | 
| 109 // and arguments have been pushed on the stack left to right, with the | 112 // and arguments have been pushed on the stack left to right, with the | 
| 110 // return address on top of them.  The actual argument count matches the | 113 // return address on top of them.  The actual argument count matches the | 
| 111 // formal parameter count expected by the function. | 114 // formal parameter count expected by the function. | 
| 112 // | 115 // | 
| 113 // The live registers are: | 116 // The live registers are: | 
| (...skipping 200 matching lines...) |
| 314          Immediate(Smi::FromInt(delta))); | 317          Immediate(Smi::FromInt(delta))); | 
| 315 } | 318 } | 
| 316 | 319 | 
| 317 | 320 | 
| 318 void FullCodeGenerator::EmitProfilingCounterReset() { | 321 void FullCodeGenerator::EmitProfilingCounterReset() { | 
| 319   int reset_value = FLAG_interrupt_budget; | 322   int reset_value = FLAG_interrupt_budget; | 
| 320   if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | 323   if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | 
| 321     // Self-optimization is a one-off thing: if it fails, don't try again. | 324     // Self-optimization is a one-off thing: if it fails, don't try again. | 
| 322     reset_value = Smi::kMaxValue; | 325     reset_value = Smi::kMaxValue; | 
| 323   } | 326   } | 
|  | 327   if (isolate()->IsDebuggerActive()) { | 
|  | 328     // Detect debug break requests as soon as possible. | 
|  | 329     reset_value = 10; | 
|  | 330   } | 
| 324   __ mov(ebx, Immediate(profiling_counter_)); | 331   __ mov(ebx, Immediate(profiling_counter_)); | 
| 325   __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | 332   __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | 
| 326          Immediate(Smi::FromInt(reset_value))); | 333          Immediate(Smi::FromInt(reset_value))); | 
| 327 } | 334 } | 
| 328 | 335 | 
| 329 | 336 | 
|  | 337 static const int kMaxBackEdgeWeight = 127; | 
|  | 338 static const int kBackEdgeDistanceDivisor = 100; | 
|  | 339 | 
|  | 340 | 
| 330 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 341 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 
| 331                                        Label* back_edge_target) { | 342                                        Label* back_edge_target) { | 
| 332   Comment cmnt(masm_, "[ Stack check"); | 343   Comment cmnt(masm_, "[ Stack check"); | 
| 333   Label ok; | 344   Label ok; | 
| 334 | 345 | 
| 335   if (FLAG_count_based_interrupts) { | 346   if (FLAG_count_based_interrupts) { | 
| 336     int weight = 1; | 347     int weight = 1; | 
| 337     if (FLAG_weighted_back_edges) { | 348     if (FLAG_weighted_back_edges) { | 
| 338       ASSERT(back_edge_target->is_bound()); | 349       ASSERT(back_edge_target->is_bound()); | 
| 339       int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 350       int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 
| 340       weight = Min(127, Max(1, distance / 100)); | 351       weight = Min(kMaxBackEdgeWeight, | 
|  | 352                    Max(1, distance / kBackEdgeDistanceDivisor)); | 
| 341     } | 353     } | 
| 342     EmitProfilingCounterDecrement(weight); | 354     EmitProfilingCounterDecrement(weight); | 
| 343     __ j(positive, &ok, Label::kNear); | 355     __ j(positive, &ok, Label::kNear); | 
| 344     InterruptStub stub; | 356     InterruptStub stub; | 
| 345     __ CallStub(&stub); | 357     __ CallStub(&stub); | 
| 346   } else { | 358   } else { | 
| 347     // Count based interrupts happen often enough when they are enabled | 359     // Count based interrupts happen often enough when they are enabled | 
| 348     // that the additional stack checks are not necessary (they would | 360     // that the additional stack checks are not necessary (they would | 
| 349     // only check for interrupts). | 361     // only check for interrupts). | 
| 350     ExternalReference stack_limit = | 362     ExternalReference stack_limit = | 
| (...skipping 40 matching lines...) |
| 391       __ push(eax); | 403       __ push(eax); | 
| 392       __ CallRuntime(Runtime::kTraceExit, 1); | 404       __ CallRuntime(Runtime::kTraceExit, 1); | 
| 393     } | 405     } | 
| 394     if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 406     if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 
| 395       // Pretend that the exit is a backwards jump to the entry. | 407       // Pretend that the exit is a backwards jump to the entry. | 
| 396       int weight = 1; | 408       int weight = 1; | 
| 397       if (info_->ShouldSelfOptimize()) { | 409       if (info_->ShouldSelfOptimize()) { | 
| 398         weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 410         weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 
| 399       } else if (FLAG_weighted_back_edges) { | 411       } else if (FLAG_weighted_back_edges) { | 
| 400         int distance = masm_->pc_offset(); | 412         int distance = masm_->pc_offset(); | 
| 401         weight = Min(127, Max(1, distance / 100)); | 413         weight = Min(kMaxBackEdgeWeight, | 
|  | 414                      Max(1, distance / kBackEdgeDistanceDivisor)); | 
| 402       } | 415       } | 
| 403       EmitProfilingCounterDecrement(weight); | 416       EmitProfilingCounterDecrement(weight); | 
| 404       Label ok; | 417       Label ok; | 
| 405       __ j(positive, &ok, Label::kNear); | 418       __ j(positive, &ok, Label::kNear); | 
| 406       __ push(eax); | 419       __ push(eax); | 
| 407       if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | 420       if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | 
| 408         __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 421         __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 
| 409         __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | 422         __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | 
| 410       } else { | 423       } else { | 
| 411         InterruptStub stub; | 424         InterruptStub stub; | 
| (...skipping 4084 matching lines...) |
| 4496   *context_length = 0; | 4509   *context_length = 0; | 
| 4497   return previous_; | 4510   return previous_; | 
| 4498 } | 4511 } | 
| 4499 | 4512 | 
| 4500 | 4513 | 
| 4501 #undef __ | 4514 #undef __ | 
| 4502 | 4515 | 
| 4503 } }  // namespace v8::internal | 4516 } }  // namespace v8::internal | 
| 4504 | 4517 | 
| 4505 #endif  // V8_TARGET_ARCH_IA32 | 4518 #endif  // V8_TARGET_ARCH_IA32 | 
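
A note on the weight clamping this patch factors into named constants (new lines 337-338, used at 351-352 and 413-414): the sketch below is a minimal standalone illustration of that arithmetic, not V8 code. The `BackEdgeWeight` helper name and the sample distances are invented for illustration; the constants and the Min/Max expression come straight from the diff.

```cpp
// Standalone illustration (not V8 code): how the patch derives the
// profiling-counter decrement for a loop back edge from the number of
// code bytes generated since the back-edge target.
#include <algorithm>
#include <cstdio>

namespace {

// Constants mirrored from the patch (new lines 337-338).
const int kMaxBackEdgeWeight = 127;
const int kBackEdgeDistanceDivisor = 100;

// Hypothetical helper name; V8 computes this inline in EmitStackCheck
// and EmitReturnSequence.
int BackEdgeWeight(int distance_in_bytes) {
  // One "tick" per ~100 bytes of generated code, clamped to [1, 127]:
  // tight loops still decrement the counter, and very large loop bodies
  // cannot drain the whole budget in a single step.
  return std::min(kMaxBackEdgeWeight,
                  std::max(1, distance_in_bytes / kBackEdgeDistanceDivisor));
}

}  // namespace

int main() {
  // 40-byte loop body -> 1; 2500 bytes -> 25; 50000 bytes -> capped at 127.
  std::printf("%d %d %d\n",
              BackEdgeWeight(40), BackEdgeWeight(2500), BackEdgeWeight(50000));
  return 0;
}
```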
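Likewise, the addition to `EmitProfilingCounterReset` (new lines 327-330) layers a debugger case on top of the existing self-optimization case. Below is a rough sketch of that decision with the V8 flags and the `isolate()->IsDebuggerActive()` query replaced by plain booleans; the function name and parameters are illustrative only, and `INT_MAX / 2` merely stands in for `Smi::kMaxValue`.

```cpp
// Standalone illustration (not V8 code): choosing the profiling counter's
// reset value, mirroring EmitProfilingCounterReset after this patch.
#include <climits>
#include <cstdio>

// Stand-ins for V8 flags and isolate state; all names are hypothetical.
static int ProfilingCounterResetValue(int interrupt_budget,
                                      bool should_self_optimize,
                                      bool retry_self_opt,
                                      bool debugger_active) {
  int reset_value = interrupt_budget;   // FLAG_interrupt_budget
  if (should_self_optimize && !retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = INT_MAX / 2;          // stands in for Smi::kMaxValue
  }
  if (debugger_active) {
    // Detect debug break requests as soon as possible by letting the
    // counter hit zero after only a few ticks (new lines 327-330).
    reset_value = 10;
  }
  return reset_value;
}

int main() {
  // With the debugger attached, the small reset value wins regardless of
  // the self-optimization decision (10000 is just a sample budget).
  std::printf("%d\n", ProfilingCounterResetValue(10000, true, false, true));
  return 0;
}
```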