/*
 * Copyright (C) 2009-2017 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JIT.h"

#if ENABLE(JIT)

#include "BasicBlockLocation.h"
#include "Exception.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

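// op_new_object allocates the object inline when it can: the fast path grabs memory
// straight off the MarkedAllocator's free list and initializes the inline storage,
// and only the slow path below calls out to operationNewObject.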
void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorFor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);

    JumpList slowCases;
    emitAllocateJSObject(resultReg, allocator, allocatorReg, TrustedImmPtr(structure), TrustedImmPtr(0), scratchReg, slowCases);
    emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int constructor = currentInstruction[2].u.operand;
    int hasInstanceValue = currentInstruction[3].u.operand;

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that constructor 'ImplementsDefaultHasInstance', i.e. the object is not a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customhasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    Label loop(this);
    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT2, regT3);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

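// A cell can claim to be undefined (the MasqueradesAsUndefined type-info flag, used by
// e.g. document.all), but the claim only holds within the cell's own global object, so
// the masquerading case below compares the cell's global object against ours.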
void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

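// In the JSVALUE64 encoding, false is tagged as 0x06 and true as 0x07, so a boolean
// differs from ValueFalse only in its low bit. XORing with ValueFalse maps false/true
// to 0/1; if any bit other than the low bit survives, the input was not a boolean.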
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

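// tagTypeNumberRegister holds TagTypeNumber (0xffff000000000000). Boxed numbers - both
// int32s and offset-encoded doubles - are the only values with any of those top 16 bits
// set, so a single test64 answers "is this a number?".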
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToVMEntryFrameCalleeSavesBuffer(*vm());
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
    slowPathCall.call();
}

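// Strict equality collapses to a single 64-bit compare as long as neither operand is a
// double or a cell: int32, boolean, null, and undefined each have one canonical bit
// pattern under the JSVALUE64 encoding. Doubles (1 === 1.0 crosses representations) and
// strings (compared by contents) must take the slow path.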
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

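// Control reaches op_catch from the unwinder, not from straight-line code: the VM leaves
// the handler's frame in vm.callFrameForCatch, so the handler first rebuilds
// callFrameRegister and the stack pointer from the VM, asks whether the exception is
// catchable at all, and only then unboxes the Exception object's value.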
void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromVMEntryFrameCalleeSavesBuffer(*vm());

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);
}

void JIT::emit_op_assert(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
    slowPathCall.call();
}

void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
    slowPathCall.call();
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    emitAllocateJSObject(resultReg, nullptr, allocatorReg, structureReg, TrustedImmPtr(0), scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Callee::m_type != JSFunctionType.
    linkSlowCase(iter); // doesn't have rare data
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed (no allocator)
    linkSlowCase(iter); // allocation failed (allocator empty)
    linkSlowCase(iter); // cached function didn't match

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int constructor = currentInstruction[3].u.operand;
    int hasInstanceValue = currentInstruction[4].u.operand;

    linkSlowCase(iter);
    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
}

void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // Not JSCell.
    linkSlowCase(iter); // Not JSString.

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
    slowPathCall.call();
}

#endif // USE(JSVALUE64)

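// op_loop_hint drives tier-up: every loop back-edge adds
// executionCounterIncrementForLoop to the CodeBlock's execute counter, and once the
// counter crosses zero the slow path asks operationOptimize for an optimized entry
// point to jump to.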
void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        copyCalleeSavesFromFrameOrRegisterToVMEntryFrameCalleeSavesBuffer(*vm());

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#else
    UNUSED_PARAM(iter);
#endif
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_static_error);
    slowPathCall.call();
}

void JIT::emit_op_check_traps(Instruction*)
{
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_func);
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
{
    Jump notUndefinedScope;
    int dst = currentInstruction[1].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
    emitStore(dst, jsUndefined());
#endif
    Jump done = jump();
    notUndefinedScope.link(this);

    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);

    if (opcodeID == op_new_func_exp)
        callOperation(operationNewFunction, dst, regT0, function);
    else if (opcodeID == op_new_generator_func_exp)
        callOperation(operationNewGeneratorFunction, dst, regT0, function);
    else {
        ASSERT(opcodeID == op_new_async_func_exp);
        callOperation(operationNewAsyncFunction, dst, regT0, function);
    }

    done.link(this);
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT1, regT0);
#endif
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    const JSValue* values = codeBlock()->constantBuffer(valuesIndex);
    callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
}

void JIT::emit_op_new_array_with_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_array_with_spread);
    slowPathCall.call();
}

void JIT::emit_op_spread(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_spread);
    slowPathCall.call();
}

#if USE(JSVALUE64)

void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

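// has_indexed_property uses the same self-patching scheme as get/put_by_val: the fast
// path is compiled against the array shape the profile predicts, and when that shape
// check fails at runtime, this routine assembles a stub for the newly observed mode and
// repatches the badType jump to point at it.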
void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base array check
    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

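// Named properties live either in the object's inline storage or out of line in the
// butterfly. Indices below the enumerator's cached inline capacity are inline accesses;
// anything above is found in the butterfly, whose out-of-line slots sit at negative
// offsets from its base - hence the neg32 below.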
void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell.
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure.
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset.
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access.
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);
    Jump done = jump();

    // Otherwise it's out of line.
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[2].u.operand;
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
    slowPathCall.call();
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

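// The type profiler log is a bump-allocated buffer: the JIT appends
// (value, structureID, location) entries inline and only calls out to
// operationProcessTypeProfilerLog when the cursor hits the end of the log. The
// m_lastSeenType checks let a site whose observed type is stable skip the write entirely.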
void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }

    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

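// The ShadowChicken log records prologue and tail-call packets so the debugger can
// reconstruct frames that tail calls have already deleted. The packet buffer is
// bounded, which is why ensureShadowChickenPacket may call out to flush it.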
void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
    slowPathCall.call();
}

void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
    slowPathCall.call();
}

void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
    slowPathCall.call();
}

void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
    slowPathCall.call();
}

void JIT::emit_op_to_index_string(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
    slowPathCall.call();
}

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
    slowPathCall.call();
}

void JIT::emit_op_create_cloned_arguments(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_cloned_arguments);
    slowPathCall.call();
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_create_rest(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_rest);
    slowPathCall.call();
}

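// The rest parameter's length is argumentCount - 1 (for |this|) - numParamsToSkip,
// clamped at zero, and the result is always boxed as an int32.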
void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)