| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3746 matching lines...) |
| 3757 | 3757 |
| 3758 void Code::set_stack_check_table_offset(unsigned offset) { | 3758 void Code::set_stack_check_table_offset(unsigned offset) { |
| 3759 ASSERT_EQ(FUNCTION, kind()); | 3759 ASSERT_EQ(FUNCTION, kind()); |
| 3760 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize))); | 3760 ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize))); |
| 3761 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); | 3761 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| 3762 int updated = StackCheckTableOffsetField::update(previous, offset); | 3762 int updated = StackCheckTableOffsetField::update(previous, offset); |
| 3763 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); | 3763 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| 3764 } | 3764 } |
| 3765 | 3765 |
| 3766 | 3766 |
| 3767 bool Code::stack_check_patched_for_osr() { |
| 3768 ASSERT_EQ(FUNCTION, kind()); |
| 3769 return StackCheckPatchedForOSRField::decode( |
| 3770 READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)); |
| 3771 } |
| 3772 |
| 3773 |
| 3774 void Code::set_stack_check_patched_for_osr(bool value) { |
| 3775 ASSERT_EQ(FUNCTION, kind()); |
| 3776 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset); |
| 3777 int updated = StackCheckPatchedForOSRField::update(previous, value); |
| 3778 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated); |
| 3779 } |
| 3780 |
| 3781 |
| 3782 |
| 3767 CheckType Code::check_type() { | 3783 CheckType Code::check_type() { |
| 3768 ASSERT(is_call_stub() || is_keyed_call_stub()); | 3784 ASSERT(is_call_stub() || is_keyed_call_stub()); |
| 3769 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset); | 3785 byte type = READ_BYTE_FIELD(this, kCheckTypeOffset); |
| 3770 return static_cast<CheckType>(type); | 3786 return static_cast<CheckType>(type); |
| 3771 } | 3787 } |
| 3772 | 3788 |
| 3773 | 3789 |
| 3774 void Code::set_check_type(CheckType value) { | 3790 void Code::set_check_type(CheckType value) { |
| 3775 ASSERT(is_call_stub() || is_keyed_call_stub()); | 3791 ASSERT(is_call_stub() || is_keyed_call_stub()); |
| 3776 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value); | 3792 WRITE_BYTE_FIELD(this, kCheckTypeOffset, value); |
| (...skipping 512 matching lines...) |
| 4289 kHiddenPrototypeBit) | 4305 kHiddenPrototypeBit) |
| 4290 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit) | 4306 BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit) |
| 4291 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, | 4307 BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check, |
| 4292 kNeedsAccessCheckBit) | 4308 kNeedsAccessCheckBit) |
| 4293 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype, | 4309 BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype, |
| 4294 kReadOnlyPrototypeBit) | 4310 kReadOnlyPrototypeBit) |
| 4295 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression, | 4311 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression, |
| 4296 kIsExpressionBit) | 4312 kIsExpressionBit) |
| 4297 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel, | 4313 BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel, |
| 4298 kIsTopLevelBit) | 4314 kIsTopLevelBit) |
| 4315 |
| 4299 BOOL_GETTER(SharedFunctionInfo, | 4316 BOOL_GETTER(SharedFunctionInfo, |
| 4300 compiler_hints, | 4317 compiler_hints, |
| 4301 has_only_simple_this_property_assignments, | 4318 has_only_simple_this_property_assignments, |
| 4302 kHasOnlySimpleThisPropertyAssignments) | 4319 kHasOnlySimpleThisPropertyAssignments) |
| 4303 BOOL_ACCESSORS(SharedFunctionInfo, | 4320 BOOL_ACCESSORS(SharedFunctionInfo, |
| 4304 compiler_hints, | 4321 compiler_hints, |
| 4305 allows_lazy_compilation, | 4322 allows_lazy_compilation, |
| 4306 kAllowLazyCompilation) | 4323 kAllowLazyCompilation) |
| 4307 BOOL_ACCESSORS(SharedFunctionInfo, | 4324 BOOL_ACCESSORS(SharedFunctionInfo, |
| 4308 compiler_hints, | 4325 compiler_hints, |
| (...skipping 386 matching lines...) |
| 4695 bool JSFunction::IsOptimizable() { | 4712 bool JSFunction::IsOptimizable() { |
| 4696 return code()->kind() == Code::FUNCTION && code()->optimizable(); | 4713 return code()->kind() == Code::FUNCTION && code()->optimizable(); |
| 4697 } | 4714 } |
| 4698 | 4715 |
| 4699 | 4716 |
| 4700 bool JSFunction::IsMarkedForLazyRecompilation() { | 4717 bool JSFunction::IsMarkedForLazyRecompilation() { |
| 4701 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile); | 4718 return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile); |
| 4702 } | 4719 } |
| 4703 | 4720 |
| 4704 | 4721 |
| 4722 bool JSFunction::IsMarkedForInstallingRecompiledCode() { |
| 4723 return code() == GetIsolate()->builtins()->builtin( |
| 4724 Builtins::kInstallRecompiledCode); |
| 4725 } |
| 4726 |
| 4727 |
| 4705 bool JSFunction::IsMarkedForParallelRecompilation() { | 4728 bool JSFunction::IsMarkedForParallelRecompilation() { |
| 4706 return code() == | 4729 return code() == GetIsolate()->builtins()->builtin( |
| 4707 GetIsolate()->builtins()->builtin(Builtins::kParallelRecompile); | 4730 Builtins::kParallelRecompile); |
| 4708 } | 4731 } |
| 4709 | 4732 |
| 4710 | 4733 |
| 4711 bool JSFunction::IsInRecompileQueue() { | 4734 bool JSFunction::IsInRecompileQueue() { |
| 4712 return code() == GetIsolate()->builtins()->builtin( | 4735 return code() == GetIsolate()->builtins()->builtin( |
| 4713 Builtins::kInRecompileQueue); | 4736 Builtins::kInRecompileQueue); |
| 4714 } | 4737 } |
| 4715 | 4738 |
| 4716 | 4739 |
| 4717 Code* JSFunction::code() { | 4740 Code* JSFunction::code() { |
| (...skipping 11 matching lines...) |
| 4729 ASSERT(!HEAP->InNewSpace(value)); | 4752 ASSERT(!HEAP->InNewSpace(value)); |
| 4730 Address entry = value->entry(); | 4753 Address entry = value->entry(); |
| 4731 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); | 4754 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| 4732 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry( | 4755 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry( |
| 4733 this, | 4756 this, |
| 4734 HeapObject::RawField(this, kCodeEntryOffset), | 4757 HeapObject::RawField(this, kCodeEntryOffset), |
| 4735 value); | 4758 value); |
| 4736 } | 4759 } |
| 4737 | 4760 |
| 4738 | 4761 |
| 4762 void JSFunction::set_code_no_write_barrier(Code* value) { |
| 4763 ASSERT(!HEAP->InNewSpace(value)); |
| 4764 Address entry = value->entry(); |
| 4765 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| 4766 } |
| 4767 |
| 4768 |
| 4739 void JSFunction::ReplaceCode(Code* code) { | 4769 void JSFunction::ReplaceCode(Code* code) { |
| 4740 bool was_optimized = IsOptimized(); | 4770 bool was_optimized = IsOptimized(); |
| 4741 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; | 4771 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; |
| 4742 | 4772 |
| 4743 set_code(code); | 4773 set_code(code); |
| 4744 | 4774 |
| 4745 // Add/remove the function from the list of optimized functions for this | 4775 // Add/remove the function from the list of optimized functions for this |
| 4746 // context based on the state change. | 4776 // context based on the state change. |
| 4747 if (!was_optimized && is_optimized) { | 4777 if (!was_optimized && is_optimized) { |
| 4748 context()->native_context()->AddOptimizedFunction(this); | 4778 context()->native_context()->AddOptimizedFunction(this); |
| (...skipping 1231 matching lines...) |
| 5980 #undef WRITE_UINT32_FIELD | 6010 #undef WRITE_UINT32_FIELD |
| 5981 #undef READ_SHORT_FIELD | 6011 #undef READ_SHORT_FIELD |
| 5982 #undef WRITE_SHORT_FIELD | 6012 #undef WRITE_SHORT_FIELD |
| 5983 #undef READ_BYTE_FIELD | 6013 #undef READ_BYTE_FIELD |
| 5984 #undef WRITE_BYTE_FIELD | 6014 #undef WRITE_BYTE_FIELD |
| 5985 | 6015 |
| 5986 | 6016 |
| 5987 } } // namespace v8::internal | 6017 } } // namespace v8::internal |
| 5988 | 6018 |
| 5989 #endif // V8_OBJECTS_INL_H_ | 6019 #endif // V8_OBJECTS_INL_H_ |