| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 55 matching lines...) |
| 66 ASSERT(patch_site_.is_bound() == info_emitted_); | 66 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 67 } | 67 } |
| 68 | 68 |
| 69 // When initially emitting this, ensure that a jump is always generated to skip | 69 // When initially emitting this, ensure that a jump is always generated to skip |
| 70 // the inlined smi code. | 70 // the inlined smi code. |
| 71 void EmitJumpIfNotSmi(Register reg, Label* target) { | 71 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 72 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 72 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 73 Assembler::BlockConstPoolScope block_const_pool(masm_); | 73 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 74 __ bind(&patch_site_); | 74 __ bind(&patch_site_); |
| 75 __ cmp(reg, Operand(reg)); | 75 __ cmp(reg, Operand(reg)); |
| 76 // Don't use b(al, ...) as that might emit the constant pool right after the | |
| 77 // branch. After patching, when the branch is no longer unconditional, | |
| 78 // execution can continue into the constant pool. | |
| 79 __ b(eq, target); // Always taken before patched. | 76 __ b(eq, target); // Always taken before patched. |
| 80 } | 77 } |
| 81 | 78 |
| 82 // When initially emitting this, ensure that a jump is never generated to skip | 79 // When initially emitting this, ensure that a jump is never generated to skip |
| 83 // the inlined smi code. | 80 // the inlined smi code. |
| 84 void EmitJumpIfSmi(Register reg, Label* target) { | 81 void EmitJumpIfSmi(Register reg, Label* target) { |
| 85 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 82 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 86 Assembler::BlockConstPoolScope block_const_pool(masm_); | 83 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 87 __ bind(&patch_site_); | 84 __ bind(&patch_site_); |
| 88 __ cmp(reg, Operand(reg)); | 85 __ cmp(reg, Operand(reg)); |
| 89 __ b(ne, target); // Never taken before patched. | 86 __ b(ne, target); // Never taken before patched. |
| 90 } | 87 } |
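Note on the two emitters above: `cmp reg, reg` unconditionally sets the Z flag, so until the site is patched the `b(eq, ...)` in EmitJumpIfNotSmi is always taken and the `b(ne, ...)` in EmitJumpIfSmi is never taken, exactly as the comments say. When the IC machinery later patches the site, the cmp becomes a real smi test (with the branch condition inverted), turning the branch into a genuine type check. Below is a minimal sketch of the test the patched sequence performs, assuming V8's standard smi tagging on 32-bit ARM (a smi is a 31-bit integer shifted left by one, so bit 0 is 0 for smis and 1 for heap object pointers); it is an illustration, not the patcher itself:

    #include <cstdint>

    // Hypothetical illustration of the patched-in check, not V8's patcher.
    static const intptr_t kSmiTagMask = 1;  // tag bit 0 distinguishes smis

    static inline bool IsSmi(intptr_t value) {
      // Equivalent of `tst reg, #kSmiTagMask`: Z is set iff value is a smi.
      return (value & kSmiTagMask) == 0;
    }

The BlockConstPoolScope in both emitters keeps the cmp and the branch adjacent, presumably because the patcher must treat them as a fixed two-instruction pair.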
| 91 | 88 |
| 92 void EmitPatchInfo() { | 89 void EmitPatchInfo() { |
| | 90 // Block literal pool emission whilst recording patch site information. |
| | 91 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 93 if (patch_site_.is_bound()) { | 92 if (patch_site_.is_bound()) { |
| 94 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 95 Register reg; | 94 Register reg; |
| 96 reg.set_code(delta_to_patch_site / kOff12Mask); | 95 reg.set_code(delta_to_patch_site / kOff12Mask); |
| 97 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); | 96 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); |
| 98 #ifdef DEBUG | 97 #ifdef DEBUG |
| 99 info_emitted_ = true; | 98 info_emitted_ = true; |
| 100 #endif | 99 #endif |
| 101 } else { | 100 } else { |
| 102 __ nop(); // Signals no inlined code. | 101 __ nop(); // Signals no inlined code. |
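For reference, EmitPatchInfo packs the instruction count between the patch site and this cmp into a single instruction: the register code carries `delta / kOff12Mask` and the 12-bit immediate carries `delta % kOff12Mask`. The BlockConstPoolScope added in the NEW column matters here because the delta is measured in instructions, so a constant pool dumped between patch_site_ and the cmp would inflate the distance and corrupt the recorded offset. A sketch of the encode/decode arithmetic, assuming kOff12Mask is `(1 << 12) - 1` as in constants-arm.h (the names EncodeDelta/DecodeDelta are illustrative, not V8 API):

    // Pack an instruction delta into (register code, 12-bit immediate) the
    // way EmitPatchInfo does, and recover it the way a patcher would.
    const int kOff12Mask = (1 << 12) - 1;  // 4095

    struct PackedDelta { int reg_code; int imm12; };

    static PackedDelta EncodeDelta(int delta) {
      PackedDelta p = { delta / kOff12Mask, delta % kOff12Mask };
      return p;
    }

    static int DecodeDelta(const PackedDelta& p) {
      return p.reg_code * kOff12Mask + p.imm12;  // exact inverse of EncodeDelta
    }

For example, a delta of 4100 instructions encodes as reg_code 1 and immediate 5, and decodes back to 1 * 4095 + 5 = 4100.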
| (...skipping 234 matching lines...) |
| 337 } | 336 } |
| 338 | 337 |
| 339 | 338 |
| 340 static const int kMaxBackEdgeWeight = 127; | 339 static const int kMaxBackEdgeWeight = 127; |
| 341 static const int kBackEdgeDistanceDivisor = 142; | 340 static const int kBackEdgeDistanceDivisor = 142; |
| 342 | 341 |
| 343 | 342 |
| 344 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 343 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, |
| 345 Label* back_edge_target) { | 344 Label* back_edge_target) { |
| 346 Comment cmnt(masm_, "[ Stack check"); | 345 Comment cmnt(masm_, "[ Stack check"); |
| | 346 // Block literal pools whilst emitting stack check code. |
| | 347 Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 347 Label ok; | 348 Label ok; |
| 348 | 349 |
| 349 if (FLAG_count_based_interrupts) { | 350 if (FLAG_count_based_interrupts) { |
| 350 int weight = 1; | 351 int weight = 1; |
| 351 if (FLAG_weighted_back_edges) { | 352 if (FLAG_weighted_back_edges) { |
| 352 ASSERT(back_edge_target->is_bound()); | 353 ASSERT(back_edge_target->is_bound()); |
| 353 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 354 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
| 354 weight = Min(kMaxBackEdgeWeight, | 355 weight = Min(kMaxBackEdgeWeight, |
| 355 Max(1, distance / kBackEdgeDistanceDivisor)); | 356 Max(1, distance / kBackEdgeDistanceDivisor)); |
| 356 } | 357 } |
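The interrupt weight above grows with the byte distance of the back edge and is clamped to kMaxBackEdgeWeight, so long loop bodies burn through the interrupt budget faster while tiny loops still count at least 1 per iteration. The BlockConstPoolScope added at the top of EmitStackCheck presumably exists for the same reason as in EmitPatchInfo: the stack check sequence is later located and patched (e.g. for on-stack replacement), so a literal pool in the middle would break its fixed layout. A worked sketch of the weight formula using this file's constants (127 and 142):

    // Mirror of: weight = Min(kMaxBackEdgeWeight,
    //                         Max(1, distance / kBackEdgeDistanceDivisor));
    // distance = 100   -> 100/142   = 0   -> clamped up to 1
    // distance = 568   -> 568/142   = 4   -> 4
    // distance = 20000 -> 20000/142 = 140 -> clamped down to 127
    static int BackEdgeWeight(int distance) {
      const int kMaxBackEdgeWeight = 127;
      const int kBackEdgeDistanceDivisor = 142;
      int weight = distance / kBackEdgeDistanceDivisor;
      if (weight < 1) weight = 1;
      if (weight > kMaxBackEdgeWeight) weight = kMaxBackEdgeWeight;
      return weight;
    }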
| (...skipping 4177 matching lines...) |
| 4534 *context_length = 0; | 4535 *context_length = 0; |
| 4535 return previous_; | 4536 return previous_; |
| 4536 } | 4537 } |
| 4537 | 4538 |
| 4538 | 4539 |
| 4539 #undef __ | 4540 #undef __ |
| 4540 | 4541 |
| 4541 } } // namespace v8::internal | 4542 } } // namespace v8::internal |
| 4542 | 4543 |
| 4543 #endif // V8_TARGET_ARCH_ARM | 4544 #endif // V8_TARGET_ARCH_ARM |