| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 322 matching lines...) |
| 333 if (isolate()->IsDebuggerActive()) { | 333 if (isolate()->IsDebuggerActive()) { |
| 334 // Detect debug break requests as soon as possible. | 334 // Detect debug break requests as soon as possible. |
| 335 reset_value = FLAG_interrupt_budget >> 4; | 335 reset_value = FLAG_interrupt_budget >> 4; |
| 336 } | 336 } |
| 337 __ li(a2, Operand(profiling_counter_)); | 337 __ li(a2, Operand(profiling_counter_)); |
| 338 __ li(a3, Operand(Smi::FromInt(reset_value))); | 338 __ li(a3, Operand(Smi::FromInt(reset_value))); |
| 339 __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); | 339 __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); |
| 340 } | 340 } |
| 341 | 341 |
| 342 | 342 |
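Note on the reset path shown above: when the debugger is active, the budget is shrunk so that debug break requests are noticed sooner. A minimal sketch of that value selection (FLAG_interrupt_budget is taken as a parameter here because its default is not part of this diff):

    // Sketch of the reset value written back into the profiling counter cell
    // (illustrative only; the generated code above stores it as a Smi via sw).
    int ProfilingCounterResetValue(int interrupt_budget, bool debugger_active) {
      // With the debugger active, use 1/16 of the budget so debug break
      // requests are detected as soon as possible.
      return debugger_active ? interrupt_budget >> 4 : interrupt_budget;
    }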
| 343 static const int kMaxBackEdgeWeight = 127; | 343 const int FullCodeGenerator::kMaxBackEdgeWeight = 127; |
| 344 static const int kBackEdgeDistanceDivisor = 142; | 344 const int FullCodeGenerator::kBackEdgeDistanceUnit = 142; |
| 345 | 345 |
| 346 | 346 |
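For context, the two renamed constants feed the clamped back-edge weight computation used below. A minimal sketch of that formula, assuming the values defined in this patch (the helper name and the example distance are illustrative, not from V8):

    // Sketch: weight of a back edge derived from the code distance it spans.
    // kMaxBackEdgeWeight and kBackEdgeDistanceUnit match the values above;
    // BackEdgeWeight itself is a hypothetical helper, for illustration only.
    static int BackEdgeWeight(int distance) {
      const int kMaxBackEdgeWeight = 127;
      const int kBackEdgeDistanceUnit = 142;
      int weight = distance / kBackEdgeDistanceUnit;
      if (weight < 1) weight = 1;
      if (weight > kMaxBackEdgeWeight) weight = kMaxBackEdgeWeight;
      return weight;  // e.g. distance == 1000 gives weight == 7
    }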
| 347 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 347 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, |
| 348 Label* back_edge_target) { | 348 Label* back_edge_target) { |
| 349 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need | 349 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need |
| 350 // to make sure it is constant. Branch may emit a skip-or-jump sequence | 350 // to make sure it is constant. Branch may emit a skip-or-jump sequence |
| 351 // instead of the normal Branch. It seems that the "skip" part of that | 351 // instead of the normal Branch. It seems that the "skip" part of that |
| 352 // sequence is about as long as this Branch would be so it is safe to ignore | 352 // sequence is about as long as this Branch would be so it is safe to ignore |
| 353 // that. | 353 // that. |
| 354 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 354 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 355 Comment cmnt(masm_, "[ Stack check"); | 355 Comment cmnt(masm_, "[ Stack check"); |
| 356 Label ok; | 356 Label ok; |
| 357 if (FLAG_count_based_interrupts) { | 357 if (FLAG_count_based_interrupts) { |
| 358 int weight = 1; | 358 int weight = 1; |
| 359 if (FLAG_weighted_back_edges) { | 359 if (FLAG_weighted_back_edges) { |
| 360 ASSERT(back_edge_target->is_bound()); | 360 ASSERT(back_edge_target->is_bound()); |
| 361 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 361 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
| 362 weight = Min(kMaxBackEdgeWeight, | 362 weight = Min(kMaxBackEdgeWeight, |
| 363 Max(1, distance / kBackEdgeDistanceDivisor)); | 363 Max(1, distance / kBackEdgeDistanceUnit)); |
| 364 } | 364 } |
| 365 EmitProfilingCounterDecrement(weight); | 365 EmitProfilingCounterDecrement(weight); |
| 366 __ slt(at, a3, zero_reg); | 366 __ slt(at, a3, zero_reg); |
| 367 __ beq(at, zero_reg, &ok); | 367 __ beq(at, zero_reg, &ok); |
| 368 // CallStub will emit a li t9 first, so it is safe to use the delay slot. | 368 // CallStub will emit a li t9 first, so it is safe to use the delay slot. |
| 369 InterruptStub stub; | 369 InterruptStub stub; |
| 370 __ CallStub(&stub); | 370 __ CallStub(&stub); |
| 371 } else { | 371 } else { |
| 372 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 372 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 373 __ sltu(at, sp, t0); | 373 __ sltu(at, sp, t0); |
| (...skipping 32 matching lines...) |
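The count-based interrupt path shown above amounts to the following per-back-edge check, given here as a rough C++ rendering of the emitted MIPS sequence (a sketch for orientation, not code from this patch):

    // Sketch of the back-edge check emitted when FLAG_count_based_interrupts is
    // set: decrement the profiling counter by the back-edge weight and signal
    // once it goes negative, which is what the slt/beq pair plus the
    // InterruptStub call above encode in MIPS.
    bool BackEdgeCheck(int* profiling_counter, int weight) {
      *profiling_counter -= weight;   // EmitProfilingCounterDecrement(weight)
      return *profiling_counter < 0;  // true -> the InterruptStub would be called
    }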
| 406 __ CallRuntime(Runtime::kTraceExit, 1); | 406 __ CallRuntime(Runtime::kTraceExit, 1); |
| 407 } | 407 } |
| 408 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 408 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { |
| 409 // Pretend that the exit is a backwards jump to the entry. | 409 // Pretend that the exit is a backwards jump to the entry. |
| 410 int weight = 1; | 410 int weight = 1; |
| 411 if (info_->ShouldSelfOptimize()) { | 411 if (info_->ShouldSelfOptimize()) { |
| 412 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 412 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 413 } else if (FLAG_weighted_back_edges) { | 413 } else if (FLAG_weighted_back_edges) { |
| 414 int distance = masm_->pc_offset(); | 414 int distance = masm_->pc_offset(); |
| 415 weight = Min(kMaxBackEdgeWeight, | 415 weight = Min(kMaxBackEdgeWeight, |
| 416 Max(1, distance / kBackEdgeDistanceDivisor)); | 416 Max(1, distance / kBackEdgeDistanceUnit)); |
| 417 } | 417 } |
| 418 EmitProfilingCounterDecrement(weight); | 418 EmitProfilingCounterDecrement(weight); |
| 419 Label ok; | 419 Label ok; |
| 420 __ Branch(&ok, ge, a3, Operand(zero_reg)); | 420 __ Branch(&ok, ge, a3, Operand(zero_reg)); |
| 421 __ push(v0); | 421 __ push(v0); |
| 422 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | 422 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { |
| 423 __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 423 __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 424 __ push(a2); | 424 __ push(a2); |
| 425 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | 425 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); |
| 426 } else { | 426 } else { |
| (...skipping 4189 matching lines...) |
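The exit-sequence weighting above mirrors the back-edge case: self-optimizing functions spread the interrupt budget over a fixed number of returns, and otherwise the weight scales with the function's code size. A small sketch of that selection (flag values appear as parameters because their defaults are not shown in this patch):

    // Sketch of the exit weight selection (illustrative, not from this patch).
    int ExitWeight(bool should_self_optimize, int interrupt_budget,
                   int self_opt_count, int pc_offset) {
      if (should_self_optimize) return interrupt_budget / self_opt_count;
      int weight = pc_offset / 142;    // kBackEdgeDistanceUnit
      if (weight < 1) weight = 1;
      if (weight > 127) weight = 127;  // clamp to kMaxBackEdgeWeight
      return weight;
    }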
| 4616 *context_length = 0; | 4616 *context_length = 0; |
| 4617 return previous_; | 4617 return previous_; |
| 4618 } | 4618 } |
| 4619 | 4619 |
| 4620 | 4620 |
| 4621 #undef __ | 4621 #undef __ |
| 4622 | 4622 |
| 4623 } } // namespace v8::internal | 4623 } } // namespace v8::internal |
| 4624 | 4624 |
| 4625 #endif // V8_TARGET_ARCH_MIPS | 4625 #endif // V8_TARGET_ARCH_MIPS |