OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3238 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3249 ZoneList<Expression*>* args = expr->arguments(); | 3249 ZoneList<Expression*>* args = expr->arguments(); |
3250 ASSERT(args->length() == 3); | 3250 ASSERT(args->length() == 3); |
3251 VisitForStackValue(args->at(0)); | 3251 VisitForStackValue(args->at(0)); |
3252 VisitForStackValue(args->at(1)); | 3252 VisitForStackValue(args->at(1)); |
3253 VisitForStackValue(args->at(2)); | 3253 VisitForStackValue(args->at(2)); |
3254 __ CallStub(&stub); | 3254 __ CallStub(&stub); |
3255 context()->Plug(v0); | 3255 context()->Plug(v0); |
3256 } | 3256 } |
3257 | 3257 |
3258 | 3258 |
3259 void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) { | |
3260 ZoneList<Expression*>* args = expr->arguments(); | |
3261 ASSERT(args->length() == 3); | |
3262 VisitForStackValue(args->at(0)); | |
3263 VisitForStackValue(args->at(1)); | |
3264 VisitForStackValue(args->at(2)); | |
3265 Label done; | |
3266 Label slow_case; | |
3267 Register object = a0; | |
3268 Register index1 = a1; | |
3269 Register index2 = a2; | |
3270 Register elements = a3; | |
3271 Register scratch1 = t0; | |
3272 Register scratch2 = t1; | |
3273 | |
3274 __ lw(object, MemOperand(sp, 2 * kPointerSize)); | |
3275 // Fetch the map and check if array is in fast case. | |
3276 // Check that object doesn't require security checks and | |
3277 // has no indexed interceptor. | |
3278 __ GetObjectType(object, scratch1, scratch2); | |
3279 __ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE)); | |
3280 // Map is now in scratch1. | |
3281 | |
3282 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset)); | |
3283 __ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | |
3284 __ Branch(&slow_case, ne, scratch2, Operand(zero_reg)); | |
3285 | |
3286 // Check the object's elements are in fast case and writable. | |
3287 __ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset)); | |
3288 __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset)); | |
3289 __ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex); | |
3290 __ Branch(&slow_case, ne, scratch1, Operand(scratch2)); | |
3291 | |
3292 // Check that both indices are smis. | |
3293 __ lw(index1, MemOperand(sp, 1 * kPointerSize)); | |
3294 __ lw(index2, MemOperand(sp, 0)); | |
3295 __ JumpIfNotBothSmi(index1, index2, &slow_case); | |
3296 | |
3297 // Check that both indices are valid. | |
3298 Label not_hi; | |
3299 __ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset)); | |
3300 __ Branch(&slow_case, ls, scratch1, Operand(index1)); | |
3301 __ Branch(&not_hi, NegateCondition(hi), scratch1, Operand(index1)); | |
3302 __ Branch(&slow_case, ls, scratch1, Operand(index2)); | |
3303 __ bind(&not_hi); | |
3304 | |
3305 // Bring the address of the elements into index1 and index2. | |
3306 __ Addu(scratch1, elements, | |
3307 Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
3308 __ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize); | |
3309 __ Addu(index1, scratch1, index1); | |
3310 __ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize); | |
3311 __ Addu(index2, scratch1, index2); | |
3312 | |
3313 // Swap elements. | |
3314 __ lw(scratch1, MemOperand(index1, 0)); | |
3315 __ lw(scratch2, MemOperand(index2, 0)); | |
3316 __ sw(scratch1, MemOperand(index2, 0)); | |
3317 __ sw(scratch2, MemOperand(index1, 0)); | |
3318 | |
3319 Label no_remembered_set; | |
3320 __ CheckPageFlag(elements, | |
3321 scratch1, | |
3322 1 << MemoryChunk::SCAN_ON_SCAVENGE, | |
3323 ne, | |
3324 &no_remembered_set); | |
3325 // Possible optimization: do a check that both values are Smis | |
3326 // (or them and test against Smi mask). | |
3327 | |
3328 // We are swapping two objects in an array and the incremental marker never | |
3329 // pauses in the middle of scanning a single object. Therefore the | |
3330 // incremental marker is not disturbed, so we don't need to call the | |
3331 // RecordWrite stub that notifies the incremental marker. | |
3332 __ RememberedSetHelper(elements, | |
3333 index1, | |
3334 scratch2, | |
3335 kDontSaveFPRegs, | |
3336 MacroAssembler::kFallThroughAtEnd); | |
3337 __ RememberedSetHelper(elements, | |
3338 index2, | |
3339 scratch2, | |
3340 kDontSaveFPRegs, | |
3341 MacroAssembler::kFallThroughAtEnd); | |
3342 | |
3343 __ bind(&no_remembered_set); | |
3344 // We are done. Drop elements from the stack, and return undefined. | |
3345 __ Drop(3); | |
3346 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); | |
3347 __ jmp(&done); | |
3348 | |
3349 __ bind(&slow_case); | |
3350 __ CallRuntime(Runtime::kSwapElements, 3); | |
3351 | |
3352 __ bind(&done); | |
3353 context()->Plug(v0); | |
3354 } | |
3355 | |
3356 | |
3357 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | 3259 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { |
3358 ZoneList<Expression*>* args = expr->arguments(); | 3260 ZoneList<Expression*>* args = expr->arguments(); |
3359 ASSERT_EQ(2, args->length()); | 3261 ASSERT_EQ(2, args->length()); |
3360 | 3262 |
3361 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 3263 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
3362 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); | 3264 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
3363 | 3265 |
3364 Handle<FixedArray> jsfunction_result_caches( | 3266 Handle<FixedArray> jsfunction_result_caches( |
3365 isolate()->global_context()->jsfunction_result_caches()); | 3267 isolate()->global_context()->jsfunction_result_caches()); |
3366 if (jsfunction_result_caches->length() <= cache_id) { | 3268 if (jsfunction_result_caches->length() <= cache_id) { |
(...skipping 1059 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4426 *context_length = 0; | 4328 *context_length = 0; |
4427 return previous_; | 4329 return previous_; |
4428 } | 4330 } |
4429 | 4331 |
4430 | 4332 |
4431 #undef __ | 4333 #undef __ |
4432 | 4334 |
4433 } } // namespace v8::internal | 4335 } } // namespace v8::internal |
4434 | 4336 |
4435 #endif // V8_TARGET_ARCH_MIPS | 4337 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |