/*
 * Copyright (C) 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "RepatchBuffer.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_captured_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitNotifyWrite(regT0, regT1, currentInstruction[3].u.watchpointSet);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    addSlowCase(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands value and proto into registers. regT0 is kept free
    // so it can be used for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}
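// For reference, the fast path above performs roughly the following
// prototype-chain walk (an illustrative sketch, not part of the emitted
// code; names are informal):
//
//     result = true;
//     do {
//         value = value->structure()->prototype();
//         if (value == proto)
//             return result;           // isInstance taken
//     } while (value is a cell);
//     return false;                    // fell off the end of the chain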
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
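// Note on the masquerading path above: a MasqueradesAsUndefined cell only
// compares equal to undefined from within its own global object, so the
// result is computed by comparing the global object held by the cell's
// Structure against this code block's global object.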
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
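// Worked example for the check above, using the 64-bit encoding where
// false is 0x06 and true is 0x07: xor-ing with ValueFalse maps the two
// booleans to 0x00 and 0x01, so the input was a boolean exactly when no
// bit other than the lowest survives - which is what test64 against ~1
// establishes.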
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
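// In the 64-bit encoding every number (and only a number) has at least one
// of the top sixteen bits set: integers carry the full 0xffff tag, and
// doubles are offset so those bits can never all be clear. A single AND
// against tagTypeNumberRegister therefore decides is-number.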
void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    emitGetVirtualRegister(activation, regT0);
    callOperation(operationTearOffActivation, regT0);
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
    emitGetVirtualRegister(activation, regT1);
    callOperation(operationTearOffArguments, regT0, regT1);
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in returnValueGPR if it is an object.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueGPR);
    Jump notObject = emitJumpIfCellNotObject(returnValueGPR);

    emitFunctionEpilogue();
    ret();

    // Otherwise, return 'this' in returnValueGPR.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);

    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get()));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
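// Worked example for the xor sequence above (false = 0x06, true = 0x07 in
// the 64-bit encoding): starting from true, 0x07 ^ 0x06 = 0x01, the test
// against ~1 finds no stray bits, and 0x01 ^ 0x07 = 0x06, i.e. false. Any
// non-boolean input leaves high bits set after the first xor and takes the
// slow case instead.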
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler();
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationPushWithScope, regT0);
}

void JIT::emit_op_pop_scope(Instruction*)
{
    callOperation(operationPopScope);
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}
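// Note on the both-cells check above: a value is a cell exactly when none
// of its tag bits are set, so OR-ing the operands into regT2 and testing
// the tags once covers both of them - the combined value only looks like a
// cell if both inputs do.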
void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
    load64(Address(regT3, VM::vmEntryFrameForThrowOffset()), regT0);
    store64(regT0, Address(regT3, VM::topVMEntryFrameOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}
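// The operationSwitch*WithUnknownKeyType calls here and below return the
// machine-code address of the matched case (or of the default target) in
// returnValueGPR, so the emitted code simply jumps through that register.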
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    move(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))), regT0);
    callOperation(operationThrowStaticError, regT0, currentInstruction[2].u.operand);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock->ownerExecutable());

    emitEnterOptimizationCheck();
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    callOperation(operationCreateActivation, 0);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));

    callOperation(operationCreateArguments);
    emitStoreCell(dst, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)), returnValueGPR);

    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
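// A zero allocator above means the callee's ObjectAllocationProfile has not
// been filled in (or fast allocation is otherwise unavailable), so the
// first slow case in emitSlow_op_create_this below allocates through the
// C++ slow path instead.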
void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileWillCall, regT0);
    profilerDone.link(this);
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperation(operationProfileDidCall, regT0);
    profilerDone.link(this);
}
void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(Zero, returnValueGPR), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationConvertJSValueToBoolean, regT0);
    emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}
void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(baseVal, regT1);
    callOperation(operationCheckHasInstance, dst, regT0, regT1);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}
void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    callOperation(operationGetArgumentsLength, dst, base);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}
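// The index is bumped by one above because the arguments are laid out after
// 'this' in the frame: adding 1 turns an arguments index into an offset
// from thisArgumentOffset(), and the unsigned AboveOrEqual check against
// ArgumentCount rejects both out-of-range and negative indices at once.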
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);

    callOperation(operationCreateArguments);
    emitStoreCell(arguments, returnValueGPR);
    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)), returnValueGPR);

    skipArgumentsCreation.link(this);
    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(property, regT1);
    callOperation(WithProfile, operationGetByValGeneric, dst, regT0, regT1);
}

#endif // USE(JSVALUE64)
void JIT::emit_op_touch_entry(Instruction* currentInstruction)
{
    if (m_codeBlock->symbolTable()->m_functionEnteredOnce.hasBeenInvalidated())
        return;

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_touch_entry);
    slowPathCall.call();
}

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog->timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, regT0, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog && m_vm->watchdog->isEnabled()) {
        linkSlowCase(iter);
        callOperation(operationHandleWatchdogTimer);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}
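// The loop-hint counter is the baseline-to-DFG tier-up trigger: branchAdd32
// bumps the CodeBlock's execute counter and falls into the slow path once it
// crosses zero; if operationOptimize hands back a non-null address, that is
// the optimized replacement's entry point and we jump straight into it.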
void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExec);

    if (currentInstruction[3].u.operand)
        lazyJump.link(this);
}

void JIT::emit_op_new_captured_func(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_captured_func);
    slowPathCall.call();
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
    callOperation(operationNewFunction, dst, funcExpr);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emitSlow_op_captured_mov(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet;
    if (!set || set->state() == IsInvalidated)
        return;
#if USE(JSVALUE32_64)
    linkSlowCase(iter);
#endif
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_captured_mov);
    slowPathCall.call();
}
#if USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}
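// The badType jump recorded above is patchable: if the array shape observed
// at runtime stops matching the profiled mode, privateCompileHasIndexedProperty
// (above) builds a fresh stub for the newly observed JITArrayMode and
// repatches badType to point at it, falling back to
// operationHasIndexedPropertyGeneric when no specialized stub applies.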
void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If the index is less than the enumerator's cached inline capacity, it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);
    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}
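// Storage layout assumed above: the first cachedInlineCapacity properties
// live inline in the object itself, while the remainder live in the
// butterfly, indexed backwards from offsetOfFirstProperty - hence the
// subtract-and-negate that turns a property index into a butterfly offset.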
void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_get_structure_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_structure_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_get_generic_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_generic_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_next_enumerator_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesLengthOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
        jumpToEnd.append(emitJumpIfImmediateInteger(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfImmediateNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}
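// The log written above is a fixed-size buffer: entries are appended until
// the cursor reaches logEndPtr, at which point operationProcessTypeProfilerLog
// drains the buffer into the type profiler's records and resets the cursor.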
#endif // USE(JSVALUE64)

} // namespace JSC

#endif // ENABLE(JIT)