/*
 * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#if ENABLE(JIT)

#include "BasicBlockLocation.h"
#include "BytecodeStructs.h"
#include "Exception.h"
#include "InterpreterInlines.h"
#include "JITInlines.h"
#include "JSFunction.h"
#include "JSPropertyNameEnumerator.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"
#include "TypeLocation.h"
#include "TypeProfilerLog.h"
#include "VirtualRegister.h"

namespace JSC {

#if USE(JSVALUE64)

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueGPR != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    Allocator allocator = subspaceFor<JSFinalObject>(*m_vm)->allocatorForNonVirtual(allocationSize, AllocatorForMode::AllocatorIfExists);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;
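
    // Fast path: bump-allocate the object from the allocator's free list and initialize
    // its inline storage. If allocation fails (or no allocator exists for this size), we
    // fall into the slow cases, which call operationNewObject instead.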
    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    auto mask = TrustedImm32(0);
    emitAllocateJSObject(resultReg, JITAllocator::constant(allocator), allocatorReg, TrustedImmPtr(structure), butterfly, mask, scratchReg, slowCases);
    emitInitializeInlineStorage(resultReg, structure->inlineCapacity());
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    callOperation(operationNewObject, structure);
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpOverridesHasInstance*>(currentInstruction);
    int dst = bytecode.dst();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(hasInstanceValue, regT0);

    // We don't jump if we know what Symbol.hasInstance would do.
    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));

    emitGetVirtualRegister(constructor, regT0);

    // Check that constructor 'ImplementsDefaultHasInstance' i.e. the object is not a C-API user nor a bound function.
    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    customhasInstanceValue.link(this);
    move(TrustedImm32(ValueTrue), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    addSlowCase(emitJumpIfCellNotObject(regT1));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    Label loop = label();

    addSlowCase(branch8(Equal, Address(regT2, JSCell::typeInfoTypeOffset()), TrustedImm32(ProxyObjectType)));

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    emitLoadStructure(*vm(), regT2, regT4, regT3);
    load64(Address(regT4, Structure::prototypeOffset()), regT4);
    auto hasMonoProto = branchTest64(NonZero, regT4);
    load64(Address(regT2, offsetRelativeToBase(knownPolyProtoOffset)), regT4);
    hasMonoProto.link(this);
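
    // regT4 now holds the prototype: either the Structure's prototype slot (mono proto)
    // or, when that slot is empty, the prototype stored in the object's inline storage at
    // knownPolyProtoOffset (poly proto).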
    move(regT4, regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_instanceof_custom(Instruction*)
{
    // This always goes to slow path since we expect it to be rare.
    addSlowCase(jump());
}

void JIT::emit_op_is_empty(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
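
    // The empty JSValue encodes as 0 in the 64-bit value representation, so testing for
    // it is a single 64-bit compare against JSValue::encode(JSValue()).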
    compare64(Equal, regT0, TrustedImm32(JSValue::encode(JSValue())), regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
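    // A cell can "masquerade as undefined" (e.g. document.all): it compares equal to
    // undefined, but only when observed from within its own global object, so we must
    // compare the structure's global object against ours.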
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT1, regT2);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
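
    // Booleans encode as ValueFalse and ValueTrue, which differ only in the low bit.
    // XORing with ValueFalse maps them to 0 and 1; any other value is left with bits set
    // outside the low bit, which is what the test below checks for.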
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
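
    // In the 64-bit value representation only numbers (int32s and boxed doubles) have any
    // of the high TagTypeNumber bits set, so ANDing with tagTypeNumberRegister and testing
    // for non-zero identifies a number.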
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_cell_with_type(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int type = currentInstruction[3].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(type), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_object(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
    emitTagBool(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueGPR);
    ASSERT(returnValueGPR != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);

    checkStackPointerAlignment();
    emitRestoreCalleeSaves();
    emitFunctionEpilogue();
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(emitJumpIfCellObject(regT0));
    isImm.link(this);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_set_function_name(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    callOperation(operationSetFunctionName, regT0, regT1);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;

    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());

    addJump(branchTest32(Zero, result), target);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
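    // undefined and null differ only in the TagBitUndefined bit, so masking that bit off
    // maps both to the encoding of null and a single compare covers both cases.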
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    CCallHelpers::Jump equal = branchPtr(Equal, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr)));
    store32(TrustedImm32(1), &currentInstruction[4].u.operand);
    addJump(jump(), target);
    equal.link(this);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
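    // The fast path only handles two int32 operands; everything else falls through to the
    // slow case, which calls operationCompareEq.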
    compare32(Equal, regT1, regT0, regT0);
    emitTagBool(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    GPRReg value = regT0;
    GPRReg result = regT1;
    GPRReg scratch = regT2;
    bool shouldCheckMasqueradesAsUndefined = true;
    emitGetVirtualRegister(currentInstruction[1].u.operand, value);
    emitConvertValueToBoolean(*vm(), JSValueRegs(value), result, scratch, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
    addJump(branchTest32(NonZero, result), target);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    ASSERT(regT0 == returnValueGPR);
    copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    callOperationNoExceptionCheck(operationThrow, regT0);
    jumpToExceptionHandler(*vm());
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));
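
    // A cell pointer has none of the tag bits set, so if the OR of the two operands has no
    // tag bits then both operands are cells.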
    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfInt(regT0);
    addSlowCase(emitJumpIfNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfInt(regT1);
    addSlowCase(emitJumpIfNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagBool(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotNumber(regT0));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_to_string(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_to_object(Instruction* currentInstruction)
{
    int dstVReg = currentInstruction[1].u.operand;
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotJSCell(regT0));
    addSlowCase(branch8(Below, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType)));

    emitValueProfilingSite();
    if (srcVReg != dstVReg)
        emitPutVirtualRegister(dstVReg);
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

    move(TrustedImmPtr(m_vm), regT3);
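    // The unwinder stashes the frame that should catch the exception in
    // VM::callFrameForCatch; reload it as our call frame and clear the field.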
    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));

    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);

    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
    jumpToExceptionHandler(*vm());
    isCatchableException.link(this);

    move(TrustedImmPtr(m_vm), regT3);
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);

    load64(Address(regT0, Exception::valueOffset()), regT0);
    emitPutVirtualRegister(currentInstruction[2].u.operand);

#if ENABLE(DFG_JIT)
    // FIXME: consider inline caching the process of doing OSR entry, including
    // argument type proofs, storing locals to the buffer, etc
    // https://bugs.webkit.org/show_bug.cgi?id=175598

    ValueProfileAndOperandBuffer* buffer = static_cast<ValueProfileAndOperandBuffer*>(currentInstruction[3].u.pointer);
    if (buffer || !shouldEmitProfiling())
        callOperation(operationTryOSREnterAtCatch, m_bytecodeOffset);
    else
        callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
    auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
    emitRestoreCalleeSaves();
    jump(returnValueGPR, NoPtrTag);
    skipOSREntry.link(this);
    if (buffer && shouldEmitProfiling()) {
        buffer->forEach([&] (ValueProfileAndOperand& profile) {
            JSValueRegs regs(regT0);
            emitGetVirtualRegister(profile.m_operand, regs);
            emitValueProfilingSite(profile.m_profile);
        });
    }
#endif // ENABLE(DFG_JIT)
}

void JIT::emit_op_identity_with_profile(Instruction*)
{
    // We don't need to do anything here...
}

void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
{
    int currentScope = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentScope, regT0);
    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStoreCell(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ensureCTITable();
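
    // The scrutinee's type is not known here, so call out to the runtime, which consults
    // the jump table (or falls back to the default offset) and returns the machine code
    // address to jump to.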
    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, NoPtrTag);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ensureCTITable();

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, NoPtrTag);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    size_t tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    emitGetVirtualRegister(scrutinee, regT0);
    callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
    jump(returnValueGPR, NoPtrTag);
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
    load32(codeBlock()->debuggerRequestsAddress(), regT0);
    Jump noDebuggerRequests = branchTest32(Zero, regT0);
    callOperation(operationDebug, currentInstruction[1].u.operand);
    noDebuggerRequests.link(this);
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    emitLoadStructure(*vm(), regT0, regT2, regT1);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagBool(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
        emitInitRegister(virtualRegisterForLocal(j).offset());

    emitWriteBarrier(m_codeBlock);

    emitEnterOptimizationCheck();
}

void JIT::emit_op_get_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, regT0);
    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
    emitStoreCell(dst, regT0);
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);
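
    // The bytecode caches the Structure of the |this| objects it has seen. The fast path
    // requires |this| to be a final object whose structure matches the cache; anything
    // else (including a cleared cache) takes the slow path.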
    emitJumpSlowCaseIfNotJSCell(regT1);

    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchTestPtr(Zero, regT2));
    load32(Address(regT2, Structure::structureIDOffset()), regT2);
    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
    RegisterID calleeReg = regT0;
    RegisterID rareDataReg = regT4;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID cachedFunctionReg = regT4;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    addSlowCase(branch8(NotEqual, Address(calleeReg, JSCell::typeInfoTypeOffset()), TrustedImm32(JSFunctionType)));
    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
    addSlowCase(branchTestPtr(Zero, rareDataReg));
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), rareDataReg);
    load32(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branch32(Equal, allocatorReg, TrustedImm32(Allocator().offset())));
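
    // The callee's FunctionRareData caches an ObjectAllocationProfile (allocator and
    // Structure) for the |this| objects it constructs. Only use it if this create_this
    // site has seen exactly this callee before, or has already seen multiple callees.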
    loadPtr(cachedFunction, cachedFunctionReg);
    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
    hasSeenMultipleCallees.link(this);

    JumpList slowCases;
    auto butterfly = TrustedImmPtr(nullptr);
    auto mask = TrustedImm32(0);
    emitAllocateJSObject(resultReg, JITAllocator::variable(), allocatorReg, structureReg, butterfly, mask, scratchReg, slowCases);
    emitGetVirtualRegister(callee, scratchReg);
    loadPtr(Address(scratchReg, JSFunction::offsetOfRareData()), scratchReg);
    xorPtr(TrustedImmPtr(JSFunctionPoison::key()), scratchReg);
    load32(Address(scratchReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfInlineCapacity()), scratchReg);
    emitInitializeInlineStorage(resultReg, scratchReg);
    addSlowCase(slowCases);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_tdz(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
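    // A let/const binding in its temporal dead zone holds the empty JSValue, which
    // encodes as 0, so a simple zero test detects it.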
    addSlowCase(branchTest64(Zero, regT0));
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationCompareEq, regT0, regT1);
    xor32(TrustedImm32(0x1), regT0);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceof*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int proto = bytecode.prototype();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(proto, regT1);
    callOperation(operationInstanceOf, dst, regT0, regT1);
}

void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto& bytecode = *reinterpret_cast<OpInstanceofCustom*>(currentInstruction);
    int dst = bytecode.dst();
    int value = bytecode.value();
    int constructor = bytecode.constructor();
    int hasInstanceValue = bytecode.hasInstanceValue();

    emitGetVirtualRegister(value, regT0);
    emitGetVirtualRegister(constructor, regT1);
    emitGetVirtualRegister(hasInstanceValue, regT2);
    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
    emitTagBool(returnValueGPR);
    emitPutVirtualRegister(dst, returnValueGPR);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
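    // The execution counter counts up from a negative threshold; when adding the loop
    // increment makes it cross zero, we take the slow case, which calls operationOptimize
    // to attempt OSR into optimized code.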
    if (canBeOptimized()) {
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    }
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkAllSlowCases(iter);

        copyCalleeSavesFromFrameOrRegisterToEntryFrameCalleeSavesBuffer(vm()->topEntryFrame);

        callOperation(operationOptimize, m_bytecodeOffset);
        Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
        if (!ASSERT_DISABLED) {
            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
            abortWithReason(JITUnreasonableLoopHintJumpTarget);
            ok.link(this);
        }
        jump(returnValueGPR, NoPtrTag);
        noOptimizedEntry.link(this);

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_check_traps(Instruction*)
{
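    // The VM sets its "need trap handling" flag when it wants this thread to pause at a
    // safepoint (for example, a termination request or a debugger interrupt); the slow
    // path services the request via operationHandleTraps.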
    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->needTrapHandlingAddress())));
}

void JIT::emit_op_nop(Instruction*)
{
}

void JIT::emit_op_super_sampler_begin(Instruction*)
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emit_op_super_sampler_end(Instruction*)
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount)));
}

void JIT::emitSlow_op_check_traps(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    callOperation(operationHandleTraps);
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    callOperation(operationNewRegexp, m_codeBlock->regexp(currentInstruction[2].u.operand));
    emitStoreCell(dst, returnValueGPR);
}

void JIT::emitNewFuncCommon(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

#if USE(JSVALUE64)
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
#else
    emitLoadPayload(currentInstruction[2].u.operand, regT0);
#endif
    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);

    OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
    if (opcodeID == op_new_func)
        callOperation(operationNewFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_generator_func)
        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
    else if (opcodeID == op_new_async_func)
        callOperation(operationNewAsyncFunction, dst, regT0, funcExec);
    else {
        ASSERT(opcodeID == op_new_async_generator_func);
        callOperation(operationNewAsyncGeneratorFunction, dst, regT0, funcExec);
    }
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}

void JIT::emit_op_new_async_func(Instruction* currentInstruction)
{
    emitNewFuncCommon(currentInstruction);
}
968 void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
970 Jump notUndefinedScope;
971 int dst = currentInstruction[1].u.operand;
973 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
974 notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
975 store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
977 emitLoadPayload(currentInstruction[2].u.operand, regT0);
978 notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
979 emitStore(dst, jsUndefined());
982 notUndefinedScope.link(this);
984 FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
985 OpcodeID opcodeID = Interpreter::getOpcodeID(currentInstruction->u.opcode);
987 if (opcodeID == op_new_func_exp)
988 callOperation(operationNewFunction, dst, regT0, function);
989 else if (opcodeID == op_new_generator_func_exp)
990 callOperation(operationNewGeneratorFunction, dst, regT0, function);
991 else if (opcodeID == op_new_async_func_exp)
992 callOperation(operationNewAsyncFunction, dst, regT0, function);
994 ASSERT(opcodeID == op_new_async_generator_func_exp);
995 callOperation(operationNewAsyncGeneratorFunction, dst, regT0, function);

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_async_generator_func_exp(Instruction* currentInstruction)
{
    emitNewFuncExprCommon(currentInstruction);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int valuesIndex = currentInstruction[2].u.operand;
    int size = currentInstruction[3].u.operand;
    addPtr(TrustedImm32(valuesIndex * sizeof(Register)), callFrameRegister, regT0);
    callOperation(operationNewArrayWithProfile, dst,
        currentInstruction[4].u.arrayAllocationProfile, regT0, size);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int sizeIndex = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    emitGetVirtualRegister(sizeIndex, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, regT0);
#else
    emitLoad(sizeIndex, regT1, regT0);
    callOperation(operationNewArrayWithSizeAndProfile, dst,
        currentInstruction[3].u.arrayAllocationProfile, JSValueRegs(regT1, regT0));
#endif
}

#if USE(JSVALUE64)
void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int enumerator = currentInstruction[4].u.operand;

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Jump done = jump();

    LinkBuffer patchBuffer(*this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer, NoPtrTag,
        "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric, SlowPathPtrTag));
}

void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
    // number was signed since m_vectorLength is always less than intmax (since the total allocation
    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
    // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extending since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    JITArrayMode mode = chooseArrayMode(profile);
    PatchableJump badType;

    // FIXME: Add support for other types like TypedArrays and Arguments.
    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);

    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
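
    // The recorded ByValCompilationInfo lets the slow path later patch this site with a
    // specialized stub (see privateCompileHasIndexedProperty above) once the array type
    // has been observed.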
}

void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int index = currentInstruction[4].u.operand;
    int enumerator = currentInstruction[5].u.operand;

    // Check that base is a cell
    emitGetVirtualRegister(base, regT0);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Check the structure
    emitGetVirtualRegister(enumerator, regT2);
    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));

    // Compute the offset
    emitGetVirtualRegister(index, regT1);
    // If index is less than the enumerator's cached inline storage, then it's an inline access
    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(regT0, regT1, TimesEight), regT0);
    Jump done = jump();
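
    // Out-of-line properties live in the butterfly's property storage rather than in the
    // object itself. The enumerator numbers inline properties first, so subtract the
    // inline capacity and let offsetOfFirstProperty translate the remaining index into a
    // butterfly-relative offset.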
    // Otherwise it's out of line
    outOfLineAccess.link(this);
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
    signExtend32ToPtr(regT1, regT1);
    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int enumerator = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(index, regT0);
    emitGetVirtualRegister(enumerator, regT1);
    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));

    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
    Jump done = jump();

    inBounds.link(this);

    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
    signExtend32ToPtr(regT0, regT0);
    load64(BaseIndex(regT1, regT0, TimesEight), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_profile_type(Instruction* currentInstruction)
{
    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
    int valueToProfile = currentInstruction[1].u.operand;

    emitGetVirtualRegister(valueToProfile, regT0);

    JumpList jumpToEnd;

    jumpToEnd.append(branchTest64(Zero, regT0));

    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
        move(regT0, regT1);
        and64(TrustedImm32(~1), regT1);
        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
    } else if (cachedTypeLocation->m_lastSeenType == TypeAnyInt)
        jumpToEnd.append(emitJumpIfInt(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
        jumpToEnd.append(emitJumpIfNumber(regT0));
    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
        Jump isNotCell = emitJumpIfNotJSCell(regT0);
        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
        isNotCell.link(this);
    }
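
    // Otherwise, append an entry (value, structure ID, type location) to the
    // TypeProfilerLog; when the log is full it is flushed via
    // operationProcessTypeProfilerLog.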
    // Load the type profiling log into T2.
    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
    // Load the next log entry into T1.
    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);

    // Store the JSValue onto the log entry.
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));

    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
    Jump notCell = emitJumpIfNotJSCell(regT0);
    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    Jump skipIsCell = jump();
    notCell.link(this);
    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
    skipIsCell.link(this);

    // Store the typeLocation on the log entry.
    move(TrustedImmPtr(cachedTypeLocation), regT0);
    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));

    // Increment the current log entry.
    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
    // Clear the log if we're at the end of the log.
    callOperation(operationProcessTypeProfilerLog);
    skipClearLog.link(this);

    jumpToEnd.link(this);
}

void JIT::emit_op_log_shadow_chicken_prologue(Instruction* currentInstruction)
{
    updateTopCallFrame();
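    // The shadow chicken log records prologue and tail-call packets so that the debugger
    // can reconstruct stack frames that tail calls have removed from the machine stack.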
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT3);
    logShadowChickenProloguePacket(shadowPacketReg, scratch1Reg, regT3);
}

void JIT::emit_op_log_shadow_chicken_tail(Instruction* currentInstruction)
{
    updateTopCallFrame();
    static_assert(nonArgGPR0 != regT0 && nonArgGPR0 != regT2, "we will have problems if this is true.");
    GPRReg shadowPacketReg = regT0;
    GPRReg scratch1Reg = nonArgGPR0; // This must be a non-argument register.
    GPRReg scratch2Reg = regT2;
    ensureShadowChickenPacket(*vm(), shadowPacketReg, scratch1Reg, scratch2Reg);
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT2);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT3);
    logShadowChickenTailPacket(shadowPacketReg, JSValueRegs(regT2), regT3, m_codeBlock, CallSiteIndex(m_bytecodeOffset));
}

#endif // USE(JSVALUE64)

void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
{
    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
#if USE(JSVALUE64)
    basicBlockLocation->emitExecuteCode(*this);
#else
    basicBlockLocation->emitExecuteCode(*this, regT0);
#endif
}

void JIT::emit_op_argument_count(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
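    // The argument count slot includes |this|, so subtract one to get the number of
    // actual arguments before boxing it as an int32.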
    sub32(TrustedImm32(1), regT0);
    JSValueRegs result = JSValueRegs::withTwoAvailableRegs(regT0, regT1);
    boxInt32(regT0, result);
    emitPutVirtualRegister(dst, result);
}

void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
    load32(payloadFor(CallFrameSlot::argumentCount), regT0);
    sub32(TrustedImm32(1), regT0);
    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
    sub32(Imm32(numParamsToSkip), regT0);
#if USE(JSVALUE64)
    boxInt32(regT0, JSValueRegs(regT0));
#endif
    Jump done = jump();

    zeroLength.link(this);
#if USE(JSVALUE64)
    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
#else
    move(TrustedImm32(0), regT0);
#endif

    done.link(this);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, regT0);
#else
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
#endif
}

void JIT::emit_op_get_argument(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
#if USE(JSVALUE64)
    JSValueRegs resultRegs(regT0);
#else
    JSValueRegs resultRegs(regT1, regT0);
#endif

    load32(payloadFor(CallFrameSlot::argumentCount), regT2);
    Jump argumentOutOfBounds = branch32(LessThanOrEqual, regT2, TrustedImm32(index));
    loadValue(addressFor(CallFrameSlot::thisArgument + index), resultRegs);
    Jump done = jump();

    argumentOutOfBounds.link(this);
    moveValue(jsUndefined(), resultRegs);

    done.link(this);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, resultRegs);
}

} // namespace JSC

#endif // ENABLE(JIT)