OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 109 matching lines...) |
120 // frames-ia32.h for its layout. | 120 // frames-ia32.h for its layout. |
121 void FullCodeGenerator::Generate() { | 121 void FullCodeGenerator::Generate() { |
122 CompilationInfo* info = info_; | 122 CompilationInfo* info = info_; |
123 handler_table_ = | 123 handler_table_ = |
124 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); | 124 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); |
125 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell( | 125 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell( |
126 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget))); | 126 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget))); |
127 SetFunctionPosition(function()); | 127 SetFunctionPosition(function()); |
128 Comment cmnt(masm_, "[ function compiled by full code generator"); | 128 Comment cmnt(masm_, "[ function compiled by full code generator"); |
129 | 129 |
130 // We can optionally optimize based on counters rather than statistical | |
131 // sampling. | |
132 if (info->ShouldSelfOptimize()) { | |
133 if (FLAG_trace_opt_verbose) { | |
134 PrintF("[adding self-optimization header to %s]\n", | |
135 *info->function()->debug_name()->ToCString()); | |
136 } | |
137 has_self_optimization_header_ = true; | |
138 MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell( | |
139 Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt)); | |
140 JSGlobalPropertyCell* cell; | |
141 if (maybe_cell->To(&cell)) { | |
142 __ sub(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)), | |
143 Immediate(Smi::FromInt(1))); | |
144 Handle<Code> compile_stub( | |
145 isolate()->builtins()->builtin(Builtins::kLazyRecompile)); | |
146 STATIC_ASSERT(kSmiTag == 0); | |
147 __ j(zero, compile_stub); | |
148 ASSERT(masm_->pc_offset() == self_optimization_header_size()); | |
149 } | |
150 } | |
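// Aside on how this (now removed) countdown worked: each call subtracts
// Smi::FromInt(1) from the cell. Because kSmiTag == 0, a Smi's tagged word
// is zero exactly when its value is zero, so the STATIC_ASSERT plus
// j(zero, compile_stub) send control to the Builtins::kLazyRecompile stub
// once kCallsUntilPrimitiveOpt calls have drained the counter.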
151 | |
152 #ifdef DEBUG | 130 #ifdef DEBUG |
153 if (strlen(FLAG_stop_at) > 0 && | 131 if (strlen(FLAG_stop_at) > 0 && |
154 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 132 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
155 __ int3(); | 133 __ int3(); |
156 } | 134 } |
157 #endif | 135 #endif |
158 | 136 |
159 // Strict mode functions and builtins need to replace the receiver | 137 // Strict mode functions and builtins need to replace the receiver |
160 // with undefined when called as functions (without an explicit | 138 // with undefined when called as functions (without an explicit |
161 // receiver object). ecx is zero for method calls and non-zero for | 139 // receiver object). ecx is zero for method calls and non-zero for |
(...skipping 161 matching lines...) |
323 EmitReturnSequence(); | 301 EmitReturnSequence(); |
324 } | 302 } |
325 } | 303 } |
326 | 304 |
327 | 305 |
328 void FullCodeGenerator::ClearAccumulator() { | 306 void FullCodeGenerator::ClearAccumulator() { |
329 __ Set(eax, Immediate(Smi::FromInt(0))); | 307 __ Set(eax, Immediate(Smi::FromInt(0))); |
330 } | 308 } |
331 | 309 |
332 | 310 |
| 311 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
| 312 __ mov(ebx, Immediate(profiling_counter_)); |
| 313 __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 314 Immediate(Smi::FromInt(delta))); |
| 315 } |
| 316 |
| 317 |
| 318 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 319 int reset_value = FLAG_interrupt_budget; |
| 320 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { |
| 321 // Self-optimization is a one-off thing: if it fails, don't try again. |
| 322 reset_value = Smi::kMaxValue; |
| 323 } |
| 324 __ mov(ebx, Immediate(profiling_counter_)); |
| 325 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), |
| 326 Immediate(Smi::FromInt(reset_value))); |
| 327 } |
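The two helpers above factor out the counter bookkeeping that the old code repeated (with a Serializer::enabled() fork) at every back edge and return. A minimal C++ model of the protocol they emit, under illustrative names (ProfilingCounter, Decrement, and Reset are sketch names, not V8 API):

    // Sketch only: models the Smi budget stored in the JSGlobalPropertyCell.
    struct ProfilingCounter {
      int value;  // the Smi held in the cell

      // EmitProfilingCounterDecrement: each back edge or return subtracts
      // its weight; the emitted code then branches on the sign of the result.
      void Decrement(int delta) { value -= delta; }

      // EmitProfilingCounterReset: refill the budget. If self-optimization
      // is one-off and has already been tried, park the counter at the
      // largest Smi so the trigger effectively never fires again.
      void Reset(int interrupt_budget, bool self_opt_spent, int smi_max) {
        value = self_opt_spent ? smi_max : interrupt_budget;
      }
    };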
| 328 |
| 329 |
333 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 330 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, |
334 Label* back_edge_target) { | 331 Label* back_edge_target) { |
335 Comment cmnt(masm_, "[ Stack check"); | 332 Comment cmnt(masm_, "[ Stack check"); |
336 Label ok; | 333 Label ok; |
337 | 334 |
338 if (FLAG_count_based_interrupts) { | 335 if (FLAG_count_based_interrupts) { |
339 int weight = 1; | 336 int weight = 1; |
340 if (FLAG_weighted_back_edges) { | 337 if (FLAG_weighted_back_edges) { |
341 ASSERT(back_edge_target->is_bound()); | 338 ASSERT(back_edge_target->is_bound()); |
342 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 339 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
343 weight = Min(127, Max(1, distance / 100)); | 340 weight = Min(127, Max(1, distance / 100)); |
344 } | 341 } |
345 if (Serializer::enabled()) { | 342 EmitProfilingCounterDecrement(weight); |
346 __ mov(ebx, Immediate(profiling_counter_)); | |
347 __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | |
348 Immediate(Smi::FromInt(weight))); | |
349 } else { | |
350 // This version is slightly faster, but not snapshot safe. | |
351 __ sub(Operand::Cell(profiling_counter_), | |
352 Immediate(Smi::FromInt(weight))); | |
353 } | |
354 __ j(positive, &ok, Label::kNear); | 343 __ j(positive, &ok, Label::kNear); |
355 InterruptStub stub; | 344 InterruptStub stub; |
356 __ CallStub(&stub); | 345 __ CallStub(&stub); |
357 } else { | 346 } else { |
358 // Count based interrupts happen often enough when they are enabled | 347 // Count based interrupts happen often enough when they are enabled |
359 // that the additional stack checks are not necessary (they would | 348 // that the additional stack checks are not necessary (they would |
360 // only check for interrupts). | 349 // only check for interrupts). |
361 ExternalReference stack_limit = | 350 ExternalReference stack_limit = |
362 ExternalReference::address_of_stack_limit(isolate()); | 351 ExternalReference::address_of_stack_limit(isolate()); |
363 __ cmp(esp, Operand::StaticVariable(stack_limit)); | 352 __ cmp(esp, Operand::StaticVariable(stack_limit)); |
364 __ j(above_equal, &ok, Label::kNear); | 353 __ j(above_equal, &ok, Label::kNear); |
365 StackCheckStub stub; | 354 StackCheckStub stub; |
366 __ CallStub(&stub); | 355 __ CallStub(&stub); |
367 } | 356 } |
368 | 357 |
369 // Record a mapping of this PC offset to the OSR id. This is used to find | 358 // Record a mapping of this PC offset to the OSR id. This is used to find |
370 // the AST id from the unoptimized code in order to use it as a key into | 359 // the AST id from the unoptimized code in order to use it as a key into |
371 // the deoptimization input data found in the optimized code. | 360 // the deoptimization input data found in the optimized code. |
372 RecordStackCheck(stmt->OsrEntryId()); | 361 RecordStackCheck(stmt->OsrEntryId()); |
373 | 362 |
374 // Loop stack checks can be patched to perform on-stack replacement. In | 363 // Loop stack checks can be patched to perform on-stack replacement. In |
375 // order to decide whether or not to perform OSR we embed the loop depth | 364 // order to decide whether or not to perform OSR we embed the loop depth |
376 // in a test instruction after the call so we can extract it from the OSR | 365 // in a test instruction after the call so we can extract it from the OSR |
377 // builtin. | 366 // builtin. |
378 ASSERT(loop_depth() > 0); | 367 ASSERT(loop_depth() > 0); |
379 __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker))); | 368 __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker))); |
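// Aside: the test instruction above is never executed for its flags; its
// 32-bit immediate is a data slot carrying the loop depth. A sketch of how
// the OSR machinery could read it back, assuming a "test eax, imm32"
// encoding (opcode 0xA9) sits right at the call's return address:
//   Address insn = call_return_address;          // pc after the stub call
//   ASSERT(*insn == 0xA9);                       // test eax, imm32
//   int loop_depth = Memory::int32_at(insn + 1);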
380 | 369 |
381 if (FLAG_count_based_interrupts) { | 370 if (FLAG_count_based_interrupts) { |
382 // Reset the countdown. | 371 EmitProfilingCounterReset(); |
383 if (Serializer::enabled()) { | |
384 __ mov(ebx, Immediate(profiling_counter_)); | |
385 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | |
386 Immediate(Smi::FromInt(FLAG_interrupt_budget))); | |
387 } else { | |
388 __ mov(Operand::Cell(profiling_counter_), | |
389 Immediate(Smi::FromInt(FLAG_interrupt_budget))); | |
390 } | |
391 } | 372 } |
392 | 373 |
393 __ bind(&ok); | 374 __ bind(&ok); |
394 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 375 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
395 // Record a mapping of the OSR id to this PC. This is used if the OSR | 376 // Record a mapping of the OSR id to this PC. This is used if the OSR |
396 // entry becomes the target of a bailout. We don't expect it to be, but | 377 // entry becomes the target of a bailout. We don't expect it to be, but |
397 // we want it to work if it is. | 378 // we want it to work if it is. |
398 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 379 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
399 } | 380 } |
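Both the back-edge path above and the return sequence below derive the decrement weight from a code distance, clamped to [1, 127]. A standalone sketch of the clamp with a few spot checks (BackEdgeWeight is an illustrative name, not V8's):

    #include <algorithm>
    #include <cassert>

    // Roughly one unit of weight per 100 bytes of code: bigger loop bodies
    // drain the interrupt budget faster, but a single back edge never costs
    // more than 127.
    static int BackEdgeWeight(int distance) {
      return std::min(127, std::max(1, distance / 100));
    }

    int main() {
      assert(BackEdgeWeight(40) == 1);        // short loop: floor of 1
      assert(BackEdgeWeight(5000) == 50);     // mid-sized loop body
      assert(BackEdgeWeight(500000) == 127);  // clamped at the cap
      return 0;
    }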
400 | 381 |
401 | 382 |
402 void FullCodeGenerator::EmitReturnSequence() { | 383 void FullCodeGenerator::EmitReturnSequence() { |
403 Comment cmnt(masm_, "[ Return sequence"); | 384 Comment cmnt(masm_, "[ Return sequence"); |
404 if (return_label_.is_bound()) { | 385 if (return_label_.is_bound()) { |
405 __ jmp(&return_label_); | 386 __ jmp(&return_label_); |
406 } else { | 387 } else { |
407 // Common return label | 388 // Common return label |
408 __ bind(&return_label_); | 389 __ bind(&return_label_); |
409 if (FLAG_trace) { | 390 if (FLAG_trace) { |
410 __ push(eax); | 391 __ push(eax); |
411 __ CallRuntime(Runtime::kTraceExit, 1); | 392 __ CallRuntime(Runtime::kTraceExit, 1); |
412 } | 393 } |
413 if (FLAG_interrupt_at_exit) { | 394 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { |
414 // Pretend that the exit is a backwards jump to the entry. | 395 // Pretend that the exit is a backwards jump to the entry. |
415 int weight = 1; | 396 int weight = 1; |
416 if (FLAG_weighted_back_edges) { | 397 if (info_->ShouldSelfOptimize()) { |
| 398 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 399 } else if (FLAG_weighted_back_edges) { |
417 int distance = masm_->pc_offset(); | 400 int distance = masm_->pc_offset(); |
418 weight = Min(127, Max(1, distance / 100)); | 401 weight = Min(127, Max(1, distance / 100)); |
419 } | 402 } |
420 if (Serializer::enabled()) { | 403 EmitProfilingCounterDecrement(weight); |
421 __ mov(ebx, Immediate(profiling_counter_)); | |
422 __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | |
423 Immediate(Smi::FromInt(weight))); | |
424 } else { | |
425 // This version is slightly faster, but not snapshot safe. | |
426 __ sub(Operand::Cell(profiling_counter_), | |
427 Immediate(Smi::FromInt(weight))); | |
428 } | |
429 Label ok; | 404 Label ok; |
430 __ j(positive, &ok, Label::kNear); | 405 __ j(positive, &ok, Label::kNear); |
431 __ push(eax); | 406 __ push(eax); |
432 InterruptStub stub; | 407 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { |
433 __ CallStub(&stub); | 408 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
| 409 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); |
| 410 } else { |
| 411 InterruptStub stub; |
| 412 __ CallStub(&stub); |
| 413 } |
434 __ pop(eax); | 414 __ pop(eax); |
435 // Reset the countdown. | 415 EmitProfilingCounterReset(); |
436 if (Serializer::enabled()) { | |
437 __ mov(ebx, Immediate(profiling_counter_)); | |
438 __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), | |
439 Immediate(Smi::FromInt(FLAG_interrupt_budget))); | |
440 } else { | |
441 __ mov(Operand::Cell(profiling_counter_), | |
442 Immediate(Smi::FromInt(FLAG_interrupt_budget))); | |
443 } | |
444 __ bind(&ok); | 416 __ bind(&ok); |
445 } | 417 } |
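// Aside: for self-optimizing functions the weight used above is
// FLAG_interrupt_budget / FLAG_self_opt_count, so the counter crosses zero
// after roughly FLAG_self_opt_count returns. A sketch of the trigger the
// emitted code encodes (illustrative helper, not V8 API):
//   static bool TriggersOnThisReturn(int counter, int budget, int count) {
//     int weight = budget / count;   // per-return decrement
//     return counter - weight <= 0;  // j(positive, &ok) skips the slow path
//   }
// With FLAG_direct_self_opt the slow path pushes the function and calls
// Runtime::kOptimizeFunctionOnNextCall; otherwise it calls the
// InterruptStub and lets the runtime profiler decide.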
446 #ifdef DEBUG | 418 #ifdef DEBUG |
447 // Add a label for checking the size of the code used for returning. | 419 // Add a label for checking the size of the code used for returning. |
448 Label check_exit_codesize; | 420 Label check_exit_codesize; |
449 masm_->bind(&check_exit_codesize); | 421 masm_->bind(&check_exit_codesize); |
450 #endif | 422 #endif |
451 SetSourcePosition(function()->end_position() - 1); | 423 SetSourcePosition(function()->end_position() - 1); |
452 __ RecordJSReturn(); | 424 __ RecordJSReturn(); |
453 // Do not use the leave instruction here because it is too short to | 425 // Do not use the leave instruction here because it is too short to |
(...skipping 4012 matching lines...) |
4466 *context_length = 0; | 4438 *context_length = 0; |
4467 return previous_; | 4439 return previous_; |
4468 } | 4440 } |
4469 | 4441 |
4470 | 4442 |
4471 #undef __ | 4443 #undef __ |
4472 | 4444 |
4473 } } // namespace v8::internal | 4445 } } // namespace v8::internal |
4474 | 4446 |
4475 #endif // V8_TARGET_ARCH_IA32 | 4447 #endif // V8_TARGET_ARCH_IA32 |