Index: src/lithium-allocator.cc
diff --git a/src/lithium-allocator.cc b/src/lithium-allocator.cc
index 9534f9e05a561ea2e83865252ad0a9e5b7a7b1b7..bbc405ba0bb5947123da521b24b19438ea8d6c62 100644
--- a/src/lithium-allocator.cc
+++ b/src/lithium-allocator.cc
@@ -230,9 +230,9 @@ LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
   if (HasRegisterAssigned()) {
     ASSERT(!IsSpilled());
     if (IsDouble()) {
-      op = LDoubleRegister::Create(assigned_register());
+      op = LDoubleRegister::Create(assigned_register(), zone);
     } else {
-      op = LRegister::Create(assigned_register());
+      op = LRegister::Create(assigned_register(), zone);
     }
   } else if (IsSpilled()) {
     ASSERT(!HasRegisterAssigned());
@@ -533,14 +533,14 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
 LAllocator::LAllocator(int num_values, HGraph* graph)
     : zone_(graph->zone()),
       chunk_(NULL),
-      live_in_sets_(graph->blocks()->length()),
-      live_ranges_(num_values * 2),
+      live_in_sets_(graph->blocks()->length(), zone_),
+      live_ranges_(num_values * 2, zone_),
       fixed_live_ranges_(NULL),
       fixed_double_live_ranges_(NULL),
-      unhandled_live_ranges_(num_values * 2),
-      active_live_ranges_(8),
-      inactive_live_ranges_(8),
-      reusable_slots_(8),
+      unhandled_live_ranges_(num_values * 2, zone_),
+      active_live_ranges_(8, zone_),
+      inactive_live_ranges_(8, zone_),
+      reusable_slots_(8, zone_),
       next_virtual_register_(num_values),
       first_artificial_register_(num_values),
       mode_(GENERAL_REGISTERS),
@@ -553,8 +553,8 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
 void LAllocator::InitializeLivenessAnalysis() {
   // Initialize the live_in sets for each block to NULL.
   int block_count = graph_->blocks()->length();
-  live_in_sets_.Initialize(block_count);
-  live_in_sets_.AddBlock(NULL, block_count);
+  live_in_sets_.Initialize(block_count, zone());
+  live_in_sets_.AddBlock(NULL, block_count, zone());
 }
 
 
@@ -630,7 +630,7 @@ LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
     TraceAlloc("Fixed reg is tagged at %d\n", pos);
     LInstruction* instr = InstructionAt(pos);
     if (instr->HasPointerMap()) {
-      instr->pointer_map()->RecordPointer(operand);
+      instr->pointer_map()->RecordPointer(operand, zone());
     }
   }
   return operand;
@@ -665,7 +665,7 @@ LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
 
 LiveRange* LAllocator::LiveRangeFor(int index) {
   if (index >= live_ranges_.length()) {
-    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
+    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
   }
   LiveRange* result = live_ranges_[index];
   if (result == NULL) {
@@ -746,7 +746,7 @@ void LAllocator::AddConstraintsGapMove(int index,
                                        LOperand* from,
                                        LOperand* to) {
   LGap* gap = GapAt(index);
-  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
   if (from->IsUnallocated()) {
     const ZoneList<LMoveOperands>* move_operands = move->move_operands();
     for (int i = 0; i < move_operands->length(); ++i) {
@@ -755,13 +755,13 @@ void LAllocator::AddConstraintsGapMove(int index,
       if (cur_to->IsUnallocated()) {
         if (LUnallocated::cast(cur_to)->virtual_register() ==
             LUnallocated::cast(from)->virtual_register()) {
-          move->AddMove(cur.source(), to);
+          move->AddMove(cur.source(), to, zone());
           return;
         }
       }
     }
   }
-  move->AddMove(from, to);
+  move->AddMove(from, to, zone());
 }
 
 
@@ -800,7 +800,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
     LiveRange* range = LiveRangeFor(first_output->virtual_register());
     bool assigned = false;
     if (first_output->HasFixedPolicy()) {
-      LUnallocated* output_copy = first_output->CopyUnconstrained();
+      LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
       bool is_tagged = HasTaggedValue(first_output->virtual_register());
       AllocateFixed(first_output, gap_index, is_tagged);
 
@@ -821,8 +821,8 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
       // Thus it should be inserted to a lifetime position corresponding to
       // the instruction end.
       LGap* gap = GapAt(gap_index);
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
-      move->AddMove(first_output, range->GetSpillOperand());
+      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
+      move->AddMove(first_output, range->GetSpillOperand(), zone());
     }
   }
 
@@ -831,7 +831,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
     for (UseIterator it(second); !it.Done(); it.Advance()) {
       LUnallocated* cur_input = LUnallocated::cast(it.Current());
       if (cur_input->HasFixedPolicy()) {
-        LUnallocated* input_copy = cur_input->CopyUnconstrained();
+        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
         bool is_tagged = HasTaggedValue(cur_input->virtual_register());
         AllocateFixed(cur_input, gap_index + 1, is_tagged);
         AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -840,7 +840,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
         // of the instruction.
         ASSERT(!cur_input->IsUsedAtStart());
 
-        LUnallocated* input_copy = cur_input->CopyUnconstrained();
+        LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
         cur_input->set_virtual_register(GetVirtualRegister());
         if (!AllocationOk()) return;
 
@@ -864,7 +864,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
       int output_vreg = second_output->virtual_register();
       int input_vreg = cur_input->virtual_register();
 
-      LUnallocated* input_copy = cur_input->CopyUnconstrained();
+      LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
       cur_input->set_virtual_register(second_output->virtual_register());
       AddConstraintsGapMove(gap_index, input_copy, cur_input);
 
@@ -872,7 +872,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
         int index = gap_index + 1;
         LInstruction* instr = InstructionAt(index);
         if (instr->HasPointerMap()) {
-          instr->pointer_map()->RecordPointer(input_copy);
+          instr->pointer_map()->RecordPointer(input_copy, zone());
         }
       } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
         // The input is assumed to immediately have a tagged representation,
@@ -901,7 +901,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
     if (IsGapAt(index)) {
       // We have a gap at this position.
       LGap* gap = GapAt(index);
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
       const ZoneList<LMoveOperands>* move_operands = move->move_operands();
       for (int i = 0; i < move_operands->length(); ++i) {
         LMoveOperands* cur = &move_operands->at(i);
@@ -1046,17 +1046,17 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
           InstructionAt(cur_block->last_instruction_index());
       if (branch->HasPointerMap()) {
         if (phi->representation().IsTagged()) {
-          branch->pointer_map()->RecordPointer(phi_operand);
+          branch->pointer_map()->RecordPointer(phi_operand, zone());
         } else if (!phi->representation().IsDouble()) {
-          branch->pointer_map()->RecordUntagged(phi_operand);
+          branch->pointer_map()->RecordUntagged(phi_operand, zone());
         }
       }
     }
 
     LiveRange* live_range = LiveRangeFor(phi->id());
     LLabel* label = chunk_->GetLabel(phi->block()->block_id());
-    label->GetOrCreateParallelMove(LGap::START)->
-        AddMove(phi_operand, live_range->GetSpillOperand());
+    label->GetOrCreateParallelMove(LGap::START, zone())->
+        AddMove(phi_operand, live_range->GetSpillOperand(), zone());
     live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
   }
 }
@@ -1151,14 +1151,15 @@ void LAllocator::ResolveControlFlow(LiveRange* range,
         LInstruction* branch = InstructionAt(pred->last_instruction_index());
         if (branch->HasPointerMap()) {
           if (HasTaggedValue(range->id())) {
-            branch->pointer_map()->RecordPointer(cur_op);
+            branch->pointer_map()->RecordPointer(cur_op, zone());
           } else if (!cur_op->IsDoubleStackSlot() &&
                      !cur_op->IsDoubleRegister()) {
             branch->pointer_map()->RemovePointer(cur_op);
           }
         }
       }
-      gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
+      gap->GetOrCreateParallelMove(
+          LGap::START, zone())->AddMove(pred_op, cur_op, zone());
     }
   }
 }
@@ -1169,11 +1170,11 @@ LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
   if (IsGapAt(index)) {
     LGap* gap = GapAt(index);
     return gap->GetOrCreateParallelMove(
-        pos.IsInstructionStart() ? LGap::START : LGap::END);
+        pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
   }
   int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
   return GapAt(gap_pos)->GetOrCreateParallelMove(
-      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
+      (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
 }
 
 
@@ -1205,7 +1206,7 @@ void LAllocator::ConnectRanges() {
             LParallelMove* move = GetConnectingParallelMove(pos);
             LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
             LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
-            move->AddMove(prev_operand, cur_operand);
+            move->AddMove(prev_operand, cur_operand, zone());
           }
         }
       }
@@ -1270,7 +1271,7 @@ void LAllocator::BuildLiveRanges() {
       LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
       LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
-      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
       for (int j = 0; j < move->move_operands()->length(); ++j) {
         LOperand* to = move->move_operands()->at(j).destination();
         if (to->IsUnallocated() &&
@@ -1421,7 +1422,7 @@ void LAllocator::PopulatePointerMaps() {
           safe_point >= range->spill_start_index()) {
         TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                    range->id(), range->spill_start_index(), safe_point);
-        map->RecordPointer(range->GetSpillOperand());
+        map->RecordPointer(range->GetSpillOperand(), zone());
       }
 
       if (!cur->IsSpilled()) {
@@ -1430,7 +1431,7 @@ void LAllocator::PopulatePointerMaps() {
                    cur->id(), cur->Start().Value(), safe_point);
         LOperand* operand = cur->CreateAssignedOperand(zone_);
         ASSERT(!operand->IsStackSlot());
-        map->RecordPointer(operand);
+        map->RecordPointer(operand, zone());
       }
     }
   }
@@ -1632,13 +1633,13 @@ RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
 
 void LAllocator::AddToActive(LiveRange* range) {
   TraceAlloc("Add live range %d to active\n", range->id());
-  active_live_ranges_.Add(range);
+  active_live_ranges_.Add(range, zone());
 }
 
 
 void LAllocator::AddToInactive(LiveRange* range) {
   TraceAlloc("Add live range %d to inactive\n", range->id());
-  inactive_live_ranges_.Add(range);
+  inactive_live_ranges_.Add(range, zone());
 }
 
 
@@ -1649,13 +1650,13 @@ void LAllocator::AddToUnhandledSorted(LiveRange* range) {
     LiveRange* cur_range = unhandled_live_ranges_.at(i);
     if (range->ShouldBeAllocatedBefore(cur_range)) {
       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
-      unhandled_live_ranges_.InsertAt(i + 1, range);
+      unhandled_live_ranges_.InsertAt(i + 1, range, zone());
       ASSERT(UnhandledIsSorted());
       return;
     }
   }
   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
-  unhandled_live_ranges_.InsertAt(0, range);
+  unhandled_live_ranges_.InsertAt(0, range, zone());
   ASSERT(UnhandledIsSorted());
 }
 
@@ -1664,7 +1665,7 @@ void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
   if (range == NULL || range->IsEmpty()) return;
   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
-  unhandled_live_ranges_.Add(range);
+  unhandled_live_ranges_.Add(range, zone());
 }
 
 
@@ -1705,7 +1706,7 @@ void LAllocator::FreeSpillSlot(LiveRange* range) {
 
   int index = range->TopLevel()->GetSpillOperand()->index();
   if (index >= 0) {
-    reusable_slots_.Add(range);
+    reusable_slots_.Add(range, zone());
  }
 }
 
@@ -1733,7 +1734,7 @@ void LAllocator::ActiveToHandled(LiveRange* range) {
 void LAllocator::ActiveToInactive(LiveRange* range) {
   ASSERT(active_live_ranges_.Contains(range));
   active_live_ranges_.RemoveElement(range);
-  inactive_live_ranges_.Add(range);
+  inactive_live_ranges_.Add(range, zone());
   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
 }
 
@@ -1749,7 +1750,7 @@ void LAllocator::InactiveToHandled(LiveRange* range) {
 void LAllocator::InactiveToActive(LiveRange* range) {
   ASSERT(inactive_live_ranges_.Contains(range));
   inactive_live_ranges_.RemoveElement(range);
-  active_live_ranges_.Add(range);
+  active_live_ranges_.Add(range, zone());
   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
 }
 
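The whole patch applies one mechanical change: every container operation and operand/move factory call inside LAllocator now receives an explicit Zone*, so the arena used for the allocator's temporary data is passed at each call site (zone() or zone_) rather than picked up implicitly. The sketch below is not V8's code; it is a minimal, self-contained illustration of that calling convention. The Zone and ZoneList names are borrowed for clarity, but their bodies and the main() driver are assumptions made purely for demonstration of calls like active_live_ranges_(8, zone_) and active_live_ranges_.Add(range, zone()).

// --- Illustrative sketch, not part of the patch ----------------------------
#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for a zone/arena: memory is handed out block by block
// and only reclaimed when the whole Zone is destroyed.
class Zone {
 public:
  void* New(size_t size) {
    blocks_.push_back(std::vector<char>(size));
    return blocks_.back().data();
  }

 private:
  std::vector<std::vector<char> > blocks_;
};

// Hypothetical stand-in for a zone-backed list: every operation that may
// allocate takes the Zone explicitly, mirroring the convention threaded
// through LAllocator above. Only trivially copyable element types are
// handled in this sketch.
template <typename T>
class ZoneList {
 public:
  ZoneList(int capacity, Zone* zone)
      : data_(static_cast<T*>(zone->New(sizeof(T) * capacity))),
        capacity_(capacity),
        length_(0) {}

  void Add(const T& element, Zone* zone) {
    if (length_ == capacity_) Grow(zone);
    data_[length_++] = element;
  }

  int length() const { return length_; }
  const T& at(int i) const { return data_[i]; }

 private:
  void Grow(Zone* zone) {
    int new_capacity = capacity_ == 0 ? 4 : capacity_ * 2;
    T* new_data = static_cast<T*>(zone->New(sizeof(T) * new_capacity));
    for (int i = 0; i < length_; ++i) new_data[i] = data_[i];
    data_ = new_data;  // the old block simply stays in the zone until it dies
    capacity_ = new_capacity;
  }

  T* data_;
  int capacity_;
  int length_;
};

int main() {
  Zone zone;
  ZoneList<int> ranges(8, &zone);  // mirrors active_live_ranges_(8, zone_)
  for (int i = 0; i < 20; ++i) ranges.Add(i, &zone);
  std::printf("length = %d, last = %d\n", ranges.length(), ranges.at(19));
  return 0;
}

Passing the zone at every growth point, rather than storing a pointer to it inside each list, keeps the containers themselves small and makes the allocation arena visible at each call site, which appears to be why the patch forwards zone() everywhere instead of changing the container types.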