| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 3208 matching lines...) |
| 3219 ZoneList<Expression*>* args = expr->arguments(); | 3219 ZoneList<Expression*>* args = expr->arguments(); |
| 3220 ASSERT(args->length() == 3); | 3220 ASSERT(args->length() == 3); |
| 3221 VisitForStackValue(args->at(0)); | 3221 VisitForStackValue(args->at(0)); |
| 3222 VisitForStackValue(args->at(1)); | 3222 VisitForStackValue(args->at(1)); |
| 3223 VisitForStackValue(args->at(2)); | 3223 VisitForStackValue(args->at(2)); |
| 3224 __ CallStub(&stub); | 3224 __ CallStub(&stub); |
| 3225 context()->Plug(r0); | 3225 context()->Plug(r0); |
| 3226 } | 3226 } |
| 3227 | 3227 |
| 3228 | 3228 |
| 3229 void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) { | |
| 3230 ZoneList<Expression*>* args = expr->arguments(); | |
| 3231 ASSERT(args->length() == 3); | |
| 3232 VisitForStackValue(args->at(0)); | |
| 3233 VisitForStackValue(args->at(1)); | |
| 3234 VisitForStackValue(args->at(2)); | |
| 3235 Label done; | |
| 3236 Label slow_case; | |
| 3237 Register object = r0; | |
| 3238 Register index1 = r1; | |
| 3239 Register index2 = r2; | |
| 3240 Register elements = r3; | |
| 3241 Register scratch1 = r4; | |
| 3242 Register scratch2 = r5; | |
| 3243 | |
| 3244 __ ldr(object, MemOperand(sp, 2 * kPointerSize)); | |
| 3245 // Fetch the map and check if array is in fast case. | |
| 3246 // Check that object doesn't require security checks and | |
| 3247 // has no indexed interceptor. | |
| 3248 __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE); | |
| 3249 __ b(ne, &slow_case); | |
| 3250 // Map is now in scratch1. | |
| 3251 | |
| 3252 __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset)); | |
| 3253 __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask)); | |
| 3254 __ b(ne, &slow_case); | |
| 3255 | |
| 3256 // Check the object's elements are in fast case and writable. | |
| 3257 __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset)); | |
| 3258 __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset)); | |
| 3259 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | |
| 3260 __ cmp(scratch1, ip); | |
| 3261 __ b(ne, &slow_case); | |
| 3262 | |
| 3263 // Check that both indices are smis. | |
| 3264 __ ldr(index1, MemOperand(sp, 1 * kPointerSize)); | |
| 3265 __ ldr(index2, MemOperand(sp, 0)); | |
| 3266 __ JumpIfNotBothSmi(index1, index2, &slow_case); | |
| 3267 | |
| 3268 // Check that both indices are valid. | |
| 3269 __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset)); | |
| 3270 __ cmp(scratch1, index1); | |
| 3271 __ cmp(scratch1, index2, hi); | |
| 3272 __ b(ls, &slow_case); | |
| 3273 | |
| 3274 // Bring the address of the elements into index1 and index2. | |
| 3275 __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 3276 __ add(index1, | |
| 3277 scratch1, | |
| 3278 Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize)); | |
| 3279 __ add(index2, | |
| 3280 scratch1, | |
| 3281 Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize)); | |
| 3282 | |
| 3283 // Swap elements. | |
| 3284 __ ldr(scratch1, MemOperand(index1, 0)); | |
| 3285 __ ldr(scratch2, MemOperand(index2, 0)); | |
| 3286 __ str(scratch1, MemOperand(index2, 0)); | |
| 3287 __ str(scratch2, MemOperand(index1, 0)); | |
| 3288 | |
| 3289 Label no_remembered_set; | |
| 3290 __ CheckPageFlag(elements, | |
| 3291 scratch1, | |
| 3292 1 << MemoryChunk::SCAN_ON_SCAVENGE, | |
| 3293 ne, | |
| 3294 &no_remembered_set); | |
| 3295 // Possible optimization: do a check that both values are Smis | |
| 3296 // (or them and test against Smi mask.) | |
| 3297 | |
| 3298 // We are swapping two objects in an array and the incremental marker never | |
| 3299 // pauses in the middle of scanning a single object. Therefore the | |
| 3300 // incremental marker is not disturbed, so we don't need to call the | |
| 3301 // RecordWrite stub that notifies the incremental marker. | |
| 3302 __ RememberedSetHelper(elements, | |
| 3303 index1, | |
| 3304 scratch2, | |
| 3305 kDontSaveFPRegs, | |
| 3306 MacroAssembler::kFallThroughAtEnd); | |
| 3307 __ RememberedSetHelper(elements, | |
| 3308 index2, | |
| 3309 scratch2, | |
| 3310 kDontSaveFPRegs, | |
| 3311 MacroAssembler::kFallThroughAtEnd); | |
| 3312 | |
| 3313 __ bind(&no_remembered_set); | |
| 3314 // We are done. Drop elements from the stack, and return undefined. | |
| 3315 __ Drop(3); | |
| 3316 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | |
| 3317 __ jmp(&done); | |
| 3318 | |
| 3319 __ bind(&slow_case); | |
| 3320 __ CallRuntime(Runtime::kSwapElements, 3); | |
| 3321 | |
| 3322 __ bind(&done); | |
| 3323 context()->Plug(r0); | |
| 3324 } | |
| 3325 | |
| 3326 | |
| 3327 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { | 3229 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) { |
| 3328 ZoneList<Expression*>* args = expr->arguments(); | 3230 ZoneList<Expression*>* args = expr->arguments(); |
| 3329 ASSERT_EQ(2, args->length()); | 3231 ASSERT_EQ(2, args->length()); |
| 3330 ASSERT_NE(NULL, args->at(0)->AsLiteral()); | 3232 ASSERT_NE(NULL, args->at(0)->AsLiteral()); |
| 3331 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); | 3233 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); |
| 3332 | 3234 |
| 3333 Handle<FixedArray> jsfunction_result_caches( | 3235 Handle<FixedArray> jsfunction_result_caches( |
| 3334 isolate()->global_context()->jsfunction_result_caches()); | 3236 isolate()->global_context()->jsfunction_result_caches()); |
| 3335 if (jsfunction_result_caches->length() <= cache_id) { | 3237 if (jsfunction_result_caches->length() <= cache_id) { |
| 3336 __ Abort("Attempt to use undefined cache."); | 3238 __ Abort("Attempt to use undefined cache."); |
| (...skipping 1055 matching lines...) |
| 4392 *context_length = 0; | 4294 *context_length = 0; |
| 4393 return previous_; | 4295 return previous_; |
| 4394 } | 4296 } |
| 4395 | 4297 |
| 4396 | 4298 |
| 4397 #undef __ | 4299 #undef __ |
| 4398 | 4300 |
| 4399 } } // namespace v8::internal | 4301 } } // namespace v8::internal |
| 4400 | 4302 |
| 4401 #endif // V8_TARGET_ARCH_ARM | 4303 #endif // V8_TARGET_ARCH_ARM |
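For reference, below is a minimal standalone sketch (plain C++, not V8 code; SwapElements and SlowSwap are hypothetical names) of the logic the deleted EmitSwapElements fast path implemented: validate the receiver and both indices, swap the two element slots directly when everything is in the expected fast state, and otherwise fall back to a generic slow path (Runtime::kSwapElements in the original). The generated ARM code additionally updated the remembered set when the elements page was not already marked SCAN_ON_SCAVENGE; a plain C++ sketch has no analogue for that step.

// Hypothetical, simplified illustration only.
#include <cstddef>
#include <iostream>
#include <vector>

// Stand-in for the generic runtime call taken on the slow path.
void SlowSwap(std::vector<int>& elements, std::size_t i, std::size_t j) {
  if (i < elements.size() && j < elements.size()) std::swap(elements[i], elements[j]);
}

// Fast path: both indices must be in bounds for the current backing store;
// otherwise defer to the slow path, mirroring the b(ls, &slow_case) branch.
void SwapElements(std::vector<int>& elements, std::size_t i, std::size_t j) {
  if (i >= elements.size() || j >= elements.size()) {
    SlowSwap(elements, i, j);  // slow case: out-of-range index
    return;
  }
  // In bounds: swap the two slots directly, as the generated code does with a
  // pair of ldr/str instructions against the elements backing store.
  int tmp = elements[i];
  elements[i] = elements[j];
  elements[j] = tmp;
}

int main() {
  std::vector<int> a{10, 20, 30};
  SwapElements(a, 0, 2);
  std::cout << a[0] << " " << a[1] << " " << a[2] << "\n";  // prints: 30 20 10
  return 0;
}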