/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"
#include "SlowPathCall.h"

namespace JSC {

#if USE(JSVALUE64)

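// A quick reference for the JSVALUE64 immediate encoding this file relies on
// (JSValue.h has the authoritative definitions): integers carry the
// 0xFFFF000000000000 tag held in tagTypeNumberRegister, doubles are
// offset-encoded beneath it, and the special immediates are small bit
// patterns: null = 0x02, false = 0x06, true = 0x07, undefined = 0x0A.
// A value is a cell exactly when none of these tag bits are set.
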
JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

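// A note on the two store64 flavors in emit_op_mov above: number constants can
// encode attacker-chosen bit patterns, so they are emitted via Imm64, which
// participates in constant blinding; non-number constants are VM-controlled
// and may be emitted directly as TrustedImm64.
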
void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSFinalObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

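// The emitAllocateJSObject call in emit_op_new_object above is the inline
// bump-allocation fast path: it takes the next cell off the MarkedAllocator's
// free list and installs the Structure, deferring to the cti_op_new_object
// stub (below) when the free list is empty.
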
void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    int baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that prototype is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);

    notMasqueradesAsUndefined.link(this);
    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

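// The next two predicates work purely on the immediate encoding. For
// op_is_boolean: XORing with ValueFalse (0x06) maps false to 0 and true to 1,
// so the input was a boolean exactly when every bit other than the low bit
// ends up clear.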
void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

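// op_is_number relies on the number tag occupying the high bits: any value
// with a bit set under the tagTypeNumber mask is an int32 or a double, so one
// test64 against tagTypeNumberRegister decides it.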
void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;
    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(arguments))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister (%eax on x86).
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in returnValueRegister (%eax on x86).
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in returnValueRegister.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_strcat);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

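// op_jfalse fast path: branch on the encoded integer zero or encoded false;
// any other immediate integer is truthy and falls through. Everything else
// (doubles, cells, other immediates) defers to cti_op_jtrue via the slow
// case, with the result inverted there.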
void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    int src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

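// op_eq and op_neq only fast-path two immediate integers; mixed types,
// doubles, and cells all take the slow case registered by
// emitJumpSlowCaseIfNotImmediateIntegers and end up in cti_op_eq.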
void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);

    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab the next property name from the iterator's string vector.
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);

    load64(BaseIndex(regT2, regT0, TimesEight), regT2);

    emitPutVirtualRegister(dst, regT2);

    // Increment i.
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));

    // Test base's prototype chain
    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

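// Strict equality is a single 64-bit compare provided at most one operand is
// a cell (two distinct string cells can still be equal) and neither is a
// double (distinct bit patterns can encode equal numbers, e.g. int32 1 vs.
// double 1.0). compileOpStrictEq checks both conditions before the compare64.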
void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;
    int src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, VM::exceptionOffset()), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

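// In op_catch above, the unwinder hands us the frame to resume in regT0, so
// the move re-establishes callFrameRegister before the exception value is
// read out of the VM and the VM's exception slot is cleared.
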
void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

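// Each of the switch stubs (cti_op_switch_imm/char/string) returns the CTI
// code address to branch to (either a ctiOffsets entry filled in at link time
// or the default target), so the emitted sequence just calls the stub and
// jumps through the returned pointer in regT0.
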
void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // Create the jump table for switch destinations, and track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.call();
#endif
}

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);

    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(localToOperand(j));
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_to_this(Instruction* currentInstruction)
{
    WriteBarrierBase<Structure>* cachedStructure = &currentInstruction[2].u.structure;
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);
    loadPtr(Address(regT1, JSCell::structureOffset()), regT0);

    addSlowCase(branch8(NotEqual, Address(regT0, Structure::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
    loadPtr(cachedStructure, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    int result = currentInstruction[1].u.operand;
    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);

    loadPtr(cachedFunction, regT2);
    addSlowCase(branchPtr(NotEqual, regT0, regT2));

    emitPutVirtualRegister(result);
}

void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_primitive);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_not);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_stricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_nstricteq);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int value = currentInstruction[2].u.operand;
    int proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_number);
    slowPathCall.call();
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this.
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite(regT4);
    emitPutVirtualRegister(dst, regT0);
}

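// The add32(1) in emit_op_get_argument_by_val above biases the index past the
// 'this' slot: argument i lives at thisArgumentOffset() + 1 + i, and
// ArgumentCount in the call frame header also counts 'this', so the biased
// index bounds-checks directly against it.
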
void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}

#endif // USE(JSVALUE64)

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized()) {
        if (Options::enableOSREntryInLoops()) {
            addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
                AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
        } else {
            // Add with saturation.
            move(TrustedImmPtr(m_codeBlock->addressOfJITExecuteCounter()), regT3);
            load32(regT3, regT2);
            Jump dontAdd = branch32(
                GreaterThan, regT2,
                TrustedImm32(std::numeric_limits<int32_t>::max() - Options::executionCounterIncrementForLoop()));
            add32(TrustedImm32(Options::executionCounterIncrementForLoop()), regT2);
            store32(regT2, regT3);
            dontAdd.link(this);
        }
    }

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}

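// Two counting strategies in emit_op_loop_hint above: with OSR entry in
// loops, the add itself is the trigger (the execute counter starts out
// negative, and crossing zero takes the slow path into cti_optimize);
// without it, the counter is merely saturated so later tier-up decisions
// still see how hot the loop ran.
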
void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized() && Options::enableOSREntryInLoops()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_op_new_regexp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_regexp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_func(Instruction* currentInstruction)
{
    Jump lazyJump;
    int dst = currentInstruction[1].u.operand;
    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
#else
        lazyJump = branchTest64(NonZero, addressFor(dst));
#endif
    }

    JITStubCall stubCall(this, cti_op_new_func);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
    stubCall.call(dst);

    if (currentInstruction[3].u.operand) {
#if USE(JSVALUE32_64)
        unmap();
#else
        killLastResultRegister();
#endif
        lazyJump.link(this);
    }
}

void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_func_exp);
    stubCall.addArgument(TrustedImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_with_size(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_with_size);
#if USE(JSVALUE64)
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
#else
    stubCall.addArgument(currentInstruction[2].u.operand);
#endif
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_new_array_buffer);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImmPtr(currentInstruction[4].u.arrayAllocationProfile));
    stubCall.call(currentInstruction[1].u.operand);
}

} // namespace JSC

#endif // ENABLE(JIT)