| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 24 matching lines...) |
| 35 | 35 |
| 36 namespace v8 { | 36 namespace v8 { |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 void OptimizingCompilerThread::Run() { | 40 void OptimizingCompilerThread::Run() { |
| 41 #ifdef DEBUG | 41 #ifdef DEBUG |
| 42 thread_id_ = ThreadId::Current().ToInteger(); | 42 thread_id_ = ThreadId::Current().ToInteger(); |
| 43 #endif | 43 #endif |
| 44 Isolate::SetIsolateThreadLocals(isolate_, NULL); | 44 Isolate::SetIsolateThreadLocals(isolate_, NULL); |
| 45 DisallowHeapAllocation no_allocation; |
| 46 DisallowHandleAllocation no_handles; |
| 47 DisallowHandleDereference no_deref; |
| 45 | 48 |
| 46 int64_t epoch = 0; | 49 int64_t epoch = 0; |
| 47 if (FLAG_trace_parallel_recompilation) epoch = OS::Ticks(); | 50 if (FLAG_trace_parallel_recompilation) epoch = OS::Ticks(); |
| 48 | 51 |
| 49 while (true) { | 52 while (true) { |
| 50 input_queue_semaphore_->Wait(); | 53 input_queue_semaphore_->Wait(); |
| 51 Logger::TimerEventScope timer( | 54 Logger::TimerEventScope timer( |
| 52 isolate_, Logger::TimerEventScope::v8_recompile_parallel); | 55 isolate_, Logger::TimerEventScope::v8_recompile_parallel); |
| 53 | 56 |
| 54 if (FLAG_parallel_recompilation_delay != 0) { | 57 if (FLAG_parallel_recompilation_delay != 0) { |
| (...skipping 27 matching lines...) |
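A note on the new Disallow scopes (new lines 45-47): they are stack-allocated assertion scopes that forbid heap allocation, handle allocation, and handle dereference on this thread for the rest of Run(). As a rough illustration of the RAII pattern involved (a hypothetical sketch, not V8's actual assert-scope machinery; ThreadFlags and the class name below are invented for the example):

// Hypothetical sketch of an RAII "disallow" assert scope using a per-thread
// permission bit. Illustrative only; not taken from the patch above.
struct ThreadFlags {
  static thread_local bool allow_handle_deref;
};
thread_local bool ThreadFlags::allow_handle_deref = true;

class DisallowHandleDereferenceSketch {
 public:
  DisallowHandleDereferenceSketch()
      : previous_(ThreadFlags::allow_handle_deref) {
    ThreadFlags::allow_handle_deref = false;  // Forbid for this scope.
  }
  ~DisallowHandleDereferenceSketch() {
    ThreadFlags::allow_handle_deref = previous_;  // Restore on scope exit.
  }
 private:
  bool previous_;
};

// A guarded operation would perform an ASSERT-style check of
// ThreadFlags::allow_handle_deref before touching a handle.

Declaring three such objects at the top of Run(), as the patch does, should make any accidental allocation or handle use on the compiler thread fail an assertion in debug builds.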
| 82 | 85 |
| 83 // The function may have already been optimized by OSR. Simply continue. | 86 // The function may have already been optimized by OSR. Simply continue. |
| 84 OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph(); | 87 OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph(); |
| 85 USE(status); // Prevent an unused-variable error in release mode. | 88 USE(status); // Prevent an unused-variable error in release mode. |
| 86 ASSERT(status != OptimizingCompiler::FAILED); | 89 ASSERT(status != OptimizingCompiler::FAILED); |
| 87 | 90 |
| 88 // The function may have already been optimized by OSR. Simply continue. | 91 // The function may have already been optimized by OSR. Simply continue. |
| 89 // Mark it for installing before queuing so that we can be sure of the write | 92 // Mark it for installing before queuing so that we can be sure of the write |
| 90 // order: marking first and (after being queued) installing code second. | 93 // order: marking first and (after being queued) installing code second. |
| 91 { Heap::RelocationLock relocation_lock(isolate_->heap()); | 94 { Heap::RelocationLock relocation_lock(isolate_->heap()); |
| 95 AllowHandleDereference ahd; |
| 92 optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode(); | 96 optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode(); |
| 93 } | 97 } |
| 94 output_queue_.Enqueue(optimizing_compiler); | 98 output_queue_.Enqueue(optimizing_compiler); |
| 95 } | 99 } |
| 96 | 100 |
| 97 | 101 |
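The detail worth calling out in the RelocationLock block is new line 95: handle dereference is re-enabled only around the marking call, while the Disallow scopes from the top of Run() stay active. Continuing the hypothetical sketch above (this reuses the ThreadFlags and Disallow classes from that sketch), a nested allow scope saves and overrides the same per-thread bit for its own lifetime:

class AllowHandleDereferenceSketch {
 public:
  AllowHandleDereferenceSketch()
      : previous_(ThreadFlags::allow_handle_deref) {
    ThreadFlags::allow_handle_deref = true;  // Permit within this scope only.
  }
  ~AllowHandleDereferenceSketch() {
    ThreadFlags::allow_handle_deref = previous_;  // Restore the outer setting.
  }
 private:
  bool previous_;
};

void RunTail() {  // Hypothetical stand-in for the end of the compile loop.
  DisallowHandleDereferenceSketch no_deref;    // Forbidden for the whole body.
  {
    AllowHandleDereferenceSketch allow_deref;  // Permitted for this block only.
    // Mark the closure here, under the relocation lock.
  }
  // Enqueue the job here, after marking, with dereference forbidden again.
}

Because both guards restore the saved value in their destructors, the permission cannot leak past the inner closing brace, which is what lets the patch mark the closure under the relocation lock and still enqueue it afterwards with dereference forbidden.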
| 98 void OptimizingCompilerThread::Stop() { | 102 void OptimizingCompilerThread::Stop() { |
| 99 ASSERT(!IsOptimizerThread()); | 103 ASSERT(!IsOptimizerThread()); |
| 100 Release_Store(&stop_thread_, static_cast<AtomicWord>(true)); | 104 Release_Store(&stop_thread_, static_cast<AtomicWord>(true)); |
| 101 input_queue_semaphore_->Signal(); | 105 input_queue_semaphore_->Signal(); |
| (...skipping 44 matching lines...) |
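On the shutdown path shown above: Stop() publishes the stop flag with a release store and then signals the input-queue semaphore so the compiler thread wakes from Wait() and can observe the flag. A minimal sketch of that handshake with standard C++ primitives (std::atomic and C++20 std::counting_semaphore standing in for V8's Release_Store and Semaphore; the free functions are placeholders, not the reviewed code):

#include <atomic>
#include <semaphore>  // C++20

std::atomic<bool> stop_thread{false};
std::counting_semaphore<> input_queue_semaphore{0};

void WorkerRun() {
  while (true) {
    input_queue_semaphore.acquire();  // Sleep until work arrives or Stop().
    if (stop_thread.load(std::memory_order_acquire)) return;
    // ... dequeue one job, optimize it, enqueue the result ...
  }
}

void Stop() {
  stop_thread.store(true, std::memory_order_release);  // Publish the flag.
  input_queue_semaphore.release();  // Wake the worker so it sees the flag.
}

The release store paired with the acquire load guarantees that anything written before Stop() is visible to the worker once it reads stop_thread as true.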
| 146 | 150 |
| 147 #ifdef DEBUG | 151 #ifdef DEBUG |
| 148 bool OptimizingCompilerThread::IsOptimizerThread() { | 152 bool OptimizingCompilerThread::IsOptimizerThread() { |
| 149 if (!FLAG_parallel_recompilation) return false; | 153 if (!FLAG_parallel_recompilation) return false; |
| 150 return ThreadId::Current().ToInteger() == thread_id_; | 154 return ThreadId::Current().ToInteger() == thread_id_; |
| 151 } | 155 } |
| 152 #endif | 156 #endif |
| 153 | 157 |
| 154 | 158 |
| 155 } } // namespace v8::internal | 159 } } // namespace v8::internal |