/*
 * Copyright (C) 2008, 2009, 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "DirectArguments.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "ResultType.h"
#include "ScopedArguments.h"
#include "ScopedArgumentsTable.h"
#include "SlowPathCall.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

#if USE(JSVALUE64)
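// Shared thunk for get_by_val on string bases: given the base string in regT0
// and an int32 index in regT1, it returns the single-character string in regT0,
// or zero if the access has to take the fully generic slow path.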
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(jit.branchStructure(
        NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        vm->stringStructure.get()));

    // Load string length to regT2, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0)); // A null StringImpl means the string is a rope.

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large.
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character.
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags.
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}
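// Fast path for get_by_val: speculate on the indexing shape recorded in the
// ArrayProfile and inline a direct load from the butterfly; every other case
// goes through a patchable slow path (see emitSlow_op_get_by_val below).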
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);

    PatchableJump notIndex = emitPatchableJumpIfNotInt(regT1);
    addSlowCase(notIndex);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against m_vectorLength - which will always fail if the
    // number was negative, since m_vectorLength is always less than INT_MAX (the total allocation
    // size is always less than 4GB). As such, zero-extending is correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero-extend rather than
    // sign-extend because it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branchTest64(NonZero, regT0);
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);

    Label nextHotPath = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, nextHotPath));
}
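// Contiguous-double loads: the butterfly stores raw doubles, and a hole is
// represented by NaN, so a self-inequality check doubles as the hole check.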
JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));

    return slowCases;
}
JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    load64(BaseIndex(regT2, regT1, TimesEight), regT0);
    slowCases.append(branchTest64(Zero, regT0)); // The empty value marks a hole.

    return slowCases;
}
JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}
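// When the profiled property name is a constant identifier (string or symbol),
// get_by_val can reuse the get_by_id inline-cache machinery instead of the
// indexed paths; this emits that cached-identifier fast path.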
JITGetByIdGenerator JIT::emitGetByValWithCachedId(Instruction* currentInstruction, const Identifier& propertyName, Jump& fastDoneCase, Jump& slowDoneCase, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT3

    int dst = currentInstruction[1].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitIdentifierCheck(regT1, regT3, propertyName, slowCases);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);

    fastDoneCase = jump();

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperation(WithProfile, operationGetByIdOptimize, dst, gen.stubInfo(), regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    slowDoneCase = jump();

    return gen;
}
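// The linkSlowCase() calls below must consume entries in exactly the order the
// corresponding addSlowCase() calls were made on the fast path.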
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // property int32 check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get());
    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTest64(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationGetByValOptimize, dst, regT0, regT1, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}
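// put_by_val mirrors get_by_val: profile the base's indexing shape, inline the
// store for the speculated shape, and fall back to a patchable slow path.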
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    PatchableJump notIndex = emitPatchableJumpIfNotInt(regT1);
    addSlowCase(notIndex);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, notIndex, badType, mode, profile, done, done));
}
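// Int32, Double, and Contiguous stores all funnel through this helper; the
// indexingShape argument selects the value check and representation to store.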
JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitGetVirtualRegister(value, regT3);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(emitJumpIfNotInt(regT3));
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
    case DoubleShape: {
        Jump notInt = emitJumpIfNotInt(regT3);
        convertInt32ToDouble(regT3, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        add64(tagTypeNumberRegister, regT3);
        move64ToDouble(regT3, fpRegT0);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
        break;
    }
    case ContiguousShape:
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();
    outOfBounds.link(this);

    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT1, regT3);
    store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}
JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    store64(regT3, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
    branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    return slowCases;
}
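// Cached-identifier fast path for put_by_val, analogous to the get_by_val
// version above: reuse the put_by_id inline cache when the profiled property
// name is a constant string or symbol.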
JITPutByIdGenerator JIT::emitPutByValWithCachedId(Instruction* currentInstruction, PutKind putKind, const Identifier& propertyName, JumpList& doneCases, JumpList& slowCases)
{
    // base: regT0
    // property: regT1
    // scratch: regT2

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    slowCases.append(emitJumpIfNotJSCell(regT1));
    emitIdentifierCheck(regT1, regT1, propertyName, slowCases);

    // The write barrier clobbers the registers, so after issuing the write barrier,
    // reload the registers.
    emitWriteBarrier(base, value, ShouldFilterValue);
    emitGetVirtualRegisters(base, regT0, value, regT1);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(), putKind);
    gen.generateFastPath(*this);
    doneCases.append(jump());

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    doneCases.append(jump());

    return gen;
}
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // property int32 check
    linkSlowCase(iter); // base not array check

    linkSlowCase(iter); // out of bounds

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    emitGetVirtualRegister(value, regT2);
    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, regT0, regT1, regT2, byValInfo);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
}

void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutGetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t options = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    callOperation(operationPutSetterById, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), options, regT1);
}

void JIT::emit_op_put_getter_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    int32_t attribute = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[5].u.operand, regT2);
    callOperation(operationPutGetterSetter, regT0, m_codeBlock->identifier(currentInstruction[2].u.operand).impl(), attribute, regT1, regT2);
}

void JIT::emit_op_put_getter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutGetterByVal, regT0, regT1, attributes, regT2);
}

void JIT::emit_op_put_setter_by_val(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT1);
    int32_t attributes = currentInstruction[3].u.operand;
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutSetterByVal, regT0, regT1, attributes, regT2);
}
void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    callOperation(operationDeleteById, dst, regT0, &m_codeBlock->identifier(property));
}
void JIT::emit_op_try_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT0), AccessType::GetPure);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitPutVirtualRegister(resultVReg);
}
void JIT::emitSlow_op_try_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(operationTryGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT0), AccessType::Get);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}
void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    int baseVReg = currentInstruction[1].u.operand;
    int valueVReg = currentInstruction[3].u.operand;
    unsigned direct = currentInstruction[8].u.putByIdFlags & PutByIdIsDirect;

    emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);

    // In order to be able to patch both the Structure and the object offset, we store one pointer,
    // 'hotPathBegin', to just after the point where the arguments have been loaded into registers,
    // and we generate code such that the Structure and offset are always at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(),
        direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    m_putByIds.append(gen);
}
void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int baseVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    Label coldPathBegin(this);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
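// Scope resolution. Var-injection checks guard against constructs like eval()
// injecting new variables into an otherwise statically resolvable scope chain;
// the global object's watchpoint is invalidated when such an injection occurs.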
void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    emitGetVirtualRegister(scope, regT0);
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitPutVirtualRegister(dst);
}
void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
    unsigned depth = currentInstruction[5].u.operand;

    auto emitCode = [&] (ResolveType resolveType) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalVar:
        case GlobalPropertyWithVarInjectionChecks:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            move(TrustedImmPtr(constantScope), regT0);
            emitPutVirtualRegister(dst);
            break;
        }
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
            break;
        case ModuleVar:
            move(TrustedImmPtr(currentInstruction[6].u.jsCell.get()), regT0);
            emitPutVirtualRegister(dst);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);

        Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty));
        emitCode(GlobalProperty);
        skipToEnd.append(jump());
        notGlobalProperty.link(this);

        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        emitCode(GlobalPropertyWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());
        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType);
        break;
    }
}
void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
    if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar || resolveType == GlobalLexicalVar || resolveType == ModuleVar)
        return;

    if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
        linkSlowCase(iter); // Var injections check for GlobalPropertyWithVarInjectionChecks.
        linkSlowCase(iter); // Var injections check for GlobalLexicalVarWithVarInjectionChecks.
    }

    linkSlowCase(iter);
    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_resolve_scope);
    slowPathCall.call();
}
void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(structureSlot, regT1);
    addSlowCase(branchTestPtr(Zero, regT1));
    load32(Address(regT1, Structure::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
}

void JIT::emitGetVarFromPointer(JSValue* operand, GPRReg reg)
{
    loadPtr(operand, reg);
}

void JIT::emitGetVarFromIndirectPointer(JSValue** operand, GPRReg reg)
{
    loadPtr(operand, reg);
    loadPtr(reg, reg);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)), regT0);
}
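// get_from_scope compiles one of several load strategies depending on the
// profiled ResolveType; the Unresolved cases dispatch at run time on the
// ResolveType stored in the instruction stream.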
void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = GetPutInfo(currentInstruction[4].u.operand).resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.

            GPRReg base = regT0;
            GPRReg result = regT0;
            GPRReg offset = regT1;
            GPRReg scratch = regT2;

            load32(operandSlot, offset);
            if (!ASSERT_DISABLED) {
                Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
                abortWithReason(JITOffsetIsNotOutOfLine);
                isOutOfLine.link(this);
            }
            loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
            neg32(offset);
            signExtend32ToPtr(offset, offset);
            load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (indirectLoadForOperand)
                emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
            else
                emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
            if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check.
                addSlowCase(branchTest64(Zero, regT0));
            break;
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitGetClosureVar(scope, *operandSlot);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case ModuleVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0.

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());
        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
    emitPutVirtualRegister(dst);
    emitValueProfilingSite();
}
void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = GetPutInfo(currentInstruction[4].u.operand).resolveType();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkSlowCase(iter); // bad structure

    if (resolveType == GlobalLexicalVarWithVarInjectionChecks) // Var injections check.
        linkSlowCase(iter);

    if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
        // GlobalProperty/GlobalPropertyWithVarInjectionChecks
        linkSlowCase(iter); // emitLoadWithStructureCheck
        linkSlowCase(iter); // emitLoadWithStructureCheck
        // GlobalLexicalVar
        linkSlowCase(iter); // TDZ check.
        // GlobalLexicalVarWithVarInjectionChecks.
        linkSlowCase(iter); // var injection check.
        linkSlowCase(iter); // TDZ check.
        // Dynamic.
        linkSlowCase(iter);
    }

    linkSlowCase(iter);
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}
void JIT::emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT0);
    emitNotifyWrite(set);
    storePtr(regT0, operand);
}

void JIT::emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet** indirectWatchpointSet)
{
    emitGetVirtualRegister(value, regT0);
    loadPtr(indirectWatchpointSet, regT1);
    emitNotifyWrite(regT1);
    loadPtr(addressOfOperand, regT1);
    storePtr(regT0, regT1);
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT1);
    emitGetVirtualRegister(scope, regT0);
    emitNotifyWrite(set);
    storePtr(regT1, Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)));
}
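// put_to_scope mirrors get_from_scope, adding write barriers, watchpoint
// notification, and a TDZ check before non-initializing writes to global
// lexical variables.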
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    GetPutInfo getPutInfo = GetPutInfo(currentInstruction[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            emitGetVirtualRegister(value, regT2);

            loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
            loadPtr(operandSlot, regT1);
            negPtr(regT1);
            storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitWriteBarrier(constantScope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (getPutInfo.initializationMode() != Initialization && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
                // We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
                if (indirectLoadForOperand)
                    emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT0);
                else
                    emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT0);
                addSlowCase(branchTest64(Zero, regT0));
            }
            if (indirectLoadForOperand)
                emitPutGlobalVariableIndirect(bitwise_cast<JSValue**>(operandSlot), value, bitwise_cast<WatchpointSet**>(&currentInstruction[5]));
            else
                emitPutGlobalVariable(bitwise_cast<JSValue*>(*operandSlot), value, currentInstruction[5].u.watchpointSet);
            break;
        }
        case LocalClosureVar:
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitWriteBarrier(scope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
            break;
        case ModuleVar:
        case Dynamic:
            addSlowCase(jump());
            break;
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    };

    switch (resolveType) {
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        JumpList skipToEnd;
        load32(&currentInstruction[4], regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0.

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
}
void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    GetPutInfo getPutInfo = GetPutInfo(currentInstruction[4].u.operand);
    ResolveType resolveType = getPutInfo.resolveType();
    unsigned linkCount = 0;
    if (resolveType != GlobalVar && resolveType != ClosureVar && resolveType != LocalClosureVar && resolveType != GlobalLexicalVar)
        linkCount++;
    if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks
        || resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks
        || resolveType == LocalClosureVar)
        && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
        linkCount++;
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkCount++; // bad structure
    if (getPutInfo.initializationMode() != Initialization && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) // TDZ check.
        linkCount++;
    if (resolveType == UnresolvedProperty || resolveType == UnresolvedPropertyWithVarInjectionChecks) {
        // GlobalProperty/GlobalPropertyWithVarInjectionsCheck
        linkCount++; // emitLoadWithStructureCheck
        linkCount++; // emitLoadWithStructureCheck

        // GlobalLexicalVar
        bool needsTDZCheck = getPutInfo.initializationMode() != Initialization;
        if (needsTDZCheck)
            linkCount++;
        linkCount++; // Notify write check.

        // GlobalLexicalVarWithVarInjectionsCheck
        linkCount++; // var injection check.
        if (needsTDZCheck)
            linkCount++;
        linkCount++; // Notify write check.
    }
    if (!linkCount)
        return;
    while (linkCount--)
        linkSlowCase(iter);

    if (resolveType == ModuleVar) {
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
        slowPathCall.call();
    } else
        callOperation(operationPutToScope, currentInstruction);
}
void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    load64(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitWriteBarrier(arguments, value, ShouldFilterValue);

    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(value, regT1);
    store64(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)));
}
#endif // USE(JSVALUE64)

#if USE(JSVALUE64)
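// Write barriers: skip the barrier when the value being stored is not a cell,
// and call out to the barrier slow path only when the owner is neither in the
// remembered set nor newly allocated in eden.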
void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitGetVirtualRegister(value, regT0);
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
    }

    emitGetVirtualRegister(owner, regT0);
    Jump ownerNotCell;
    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT0);
    callOperation(operationUnconditionalWriteBarrier, regT0);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}
void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    emitGetVirtualRegister(value, regT0);
    Jump valueNotCell;
    if (mode == ShouldFilterValue)
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}
#else // USE(JSVALUE64)

void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitLoad(owner, regT0, regT1);
    Jump ownerNotCell;
    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));

    Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT1);
    callOperation(operationUnconditionalWriteBarrier, regT1);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
    Jump valueNotCell;
    if (mode == ShouldFilterValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
}

#endif // USE(JSVALUE64)
void JIT::emitWriteBarrier(JSCell* owner)
{
    if (!MarkedBlock::blockFor(owner)->isMarked(owner)) {
        Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(owner);
        callOperation(operationUnconditionalWriteBarrier, owner);
        ownerIsRememberedOrInEden.link(this);
    } else
        callOperation(operationUnconditionalWriteBarrier, owner);
}
void JIT::emitIdentifierCheck(RegisterID cell, RegisterID scratch, const Identifier& propertyName, JumpList& slowCases)
{
    if (propertyName.isSymbol()) {
        slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->symbolStructure.get()));
        loadPtr(Address(cell, Symbol::offsetOfPrivateName()), scratch);
    } else {
        slowCases.append(branchStructure(NotEqual, Address(cell, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
        loadPtr(Address(cell, JSString::offsetOfValue()), scratch);
    }
    slowCases.append(branchPtr(NotEqual, scratch, TrustedImmPtr(propertyName.impl())));
}
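// The privateCompile* entry points are invoked from the slow path once a
// by-val site has mis-speculated: they compile a fresh stub for the observed
// array mode and repatch the original bad-type jump to enter it.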
void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    case JITDirectArguments:
        slowCases = emitDirectArgumentsGetByVal(currentInstruction, badType);
        break;
    case JITScopedArguments:
        slowCases = emitScopedArgumentsGetByVal(currentInstruction, badType);
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
        else
            slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}
void JIT::privateCompileGetByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    Jump fastDoneCase;
    Jump slowDoneCase;
    JumpList slowCases;

    JITGetByIdGenerator gen = emitGetByValWithCachedId(currentInstruction, propertyName, fastDoneCase, slowDoneCase, slowCases);

    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(fastDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    patchBuffer.link(slowDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToNextHotPath));

    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val with cached property name '%s' stub for %s, return point %p", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric));
}
void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    bool needsLinkForWriteBarrier = false;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        needsLinkForWriteBarrier = true;
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        needsLinkForWriteBarrier = true;
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
        else
            slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    if (needsLinkForWriteBarrier) {
        ASSERT(m_calls.last().to == operationUnconditionalWriteBarrier);
        patchBuffer.link(m_calls.last().from, operationUnconditionalWriteBarrier);
    }

    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    if (!isDirect) {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    } else {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    }
    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
}
void JIT::privateCompilePutByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    JumpList doneCases;
    JumpList slowCases;

    JITPutByIdGenerator gen = emitPutByValWithCachedId(currentInstruction, putKind, propertyName, doneCases, slowCases);

    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(doneCases, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
    for (const auto& callSite : m_calls) {
        if (callSite.to)
            patchBuffer.link(callSite.from, FunctionPtr(callSite.to));
    }
    gen.finalize(patchBuffer);

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline put_by_val%s with cached property name '%s' stub for %s, return point %p", (putKind == Direct) ? "_direct" : "", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value()));
    byValInfo->stubInfo = gen.stubInfo();

    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric));
}
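// Specialized by-val loads for arguments objects. DirectArguments can be read
// straight out of its inline storage as long as no argument has been
// overridden; ScopedArguments must consult the ScopedArgumentsTable to choose
// between the enclosing scope's variables and the overflow storage.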
JIT::JumpList JIT::emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(DirectArgumentsType));

    slowCases.append(branch32(AboveOrEqual, property, Address(base, DirectArguments::offsetOfLength())));
    slowCases.append(branchTestPtr(NonZero, Address(base, DirectArguments::offsetOfOverrides())));

    zeroExtend32ToPtr(property, scratch);
    loadValue(BaseIndex(base, scratch, TimesEight, DirectArguments::storageOffset()), result);

    return slowCases;
}
JIT::JumpList JIT::emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(ScopedArgumentsType));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, ScopedArguments::offsetOfTotalLength())));

    loadPtr(Address(base, ScopedArguments::offsetOfTable()), scratch);
    load32(Address(scratch, ScopedArgumentsTable::offsetOfLength()), scratch2);
    Jump overflowCase = branch32(AboveOrEqual, property, scratch2);
    loadPtr(Address(base, ScopedArguments::offsetOfScope()), scratch2);
    loadPtr(Address(scratch, ScopedArgumentsTable::offsetOfArguments()), scratch);
    load32(BaseIndex(scratch, property, TimesFour), scratch);
    slowCases.append(branch32(Equal, scratch, TrustedImm32(ScopeOffset::invalidOffset)));
    loadValue(BaseIndex(scratch2, scratch, TimesEight, JSEnvironmentRecord::offsetOfVariables()), result);
    Jump done = jump();
    overflowCase.link(this);
    sub32(property, scratch2);
    neg32(scratch2);
    loadValue(BaseIndex(base, scratch2, TimesEight, ScopedArguments::overflowStorageOffset()), result);
    slowCases.append(branchIfEmpty(result));
    done.link(this);

    return slowCases;
}
JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isInt(type));

    // The best way to test the array type is to use the classInfo. We need to do so without
    // clobbering the register that holds the indexing type, base, and property.
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), scratch);

    switch (elementSize(type)) {
    case 1:
        if (JSC::isSigned(type))
            load8SignedExtendTo32(BaseIndex(scratch, property, TimesOne), resultPayload);
        else
            load8(BaseIndex(scratch, property, TimesOne), resultPayload);
        break;
    case 2:
        if (JSC::isSigned(type))
            load16SignedExtendTo32(BaseIndex(scratch, property, TimesTwo), resultPayload);
        else
            load16(BaseIndex(scratch, property, TimesTwo), resultPayload);
        break;
    case 4:
        load32(BaseIndex(scratch, property, TimesFour), resultPayload);
        break;
    default:
        CRASH();
    }

    Jump done;
    if (type == TypeUint32) {
        Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));
        convertInt32ToDouble(resultPayload, fpRegT0);
        addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
#if USE(JSVALUE64)
        moveDoubleTo64(fpRegT0, resultPayload);
        sub64(tagTypeNumberRegister, resultPayload);
#else
        moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
        done = jump();
        canBeInt.link(this);
    }

#if USE(JSVALUE64)
    or64(tagTypeNumberRegister, resultPayload);
#else
    move(TrustedImm32(JSValue::Int32Tag), resultTag);
#endif
    if (done.isSet())
        done.link(this);
    return slowCases;
}
JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isFloat(type));

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), scratch);

    switch (elementSize(type)) {
    case 4:
        loadFloat(BaseIndex(scratch, property, TimesFour), fpRegT0);
        convertFloatToDouble(fpRegT0, fpRegT0);
        break;
    case 8:
        loadDouble(BaseIndex(scratch, property, TimesEight), fpRegT0);
        break;
    default:
        CRASH();
    }

    // Purify any loaded NaN into the canonical NaN so that it boxes correctly.
    Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
    static const double NaN = PNaN;
    loadDouble(TrustedImmPtr(&NaN), fpRegT0);
    notNaN.link(this);

#if USE(JSVALUE64)
    moveDoubleTo64(fpRegT0, resultPayload);
    sub64(tagTypeNumberRegister, resultPayload);
#else
    moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
    return slowCases;
}
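// Typed-array stores. For clamped types (Uint8ClampedArray) the int32 value is
// saturated to [0, 255] before the store; out-of-bounds writes are ignored but
// recorded in the ArrayProfile.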
JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isInt(type));

    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    Jump done = jump();
    inBounds.link(this);

#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    slowCases.append(emitJumpIfNotInt(earlyScratch));
#else
    emitLoad(value, lateScratch, earlyScratch);
    slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);

    if (isClamped(type)) {
        ASSERT(elementSize(type) == 1);
        ASSERT(!JSC::isSigned(type));
        Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
        Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
        xor32(earlyScratch, earlyScratch); // Negative values clamp to zero.
        Jump clamped = jump();
        tooBig.link(this);
        move(TrustedImm32(0xff), earlyScratch);
        clamped.link(this);
        inBounds.link(this);
    }

    switch (elementSize(type)) {
    case 1:
        store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
        break;
    case 2:
        store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
        break;
    case 4:
        store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
        break;
    default:
        CRASH();
    }

    done.link(this);

    return slowCases;
}
JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isFloat(type));

    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    Jump done = jump();
    inBounds.link(this);

#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    Jump doubleCase = emitJumpIfNotInt(earlyScratch);
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(emitJumpIfNotNumber(earlyScratch));
    add64(tagTypeNumberRegister, earlyScratch);
    move64ToDouble(earlyScratch, fpRegT0);
    ready.link(this);
#else
    emitLoad(value, lateScratch, earlyScratch);
    Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
    moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
    ready.link(this);
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);

    switch (elementSize(type)) {
    case 4:
        convertDoubleToFloat(fpRegT0, fpRegT0);
        storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
        break;
    case 8:
        storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
        break;
    default:
        CRASH();
    }

    done.link(this);

    return slowCases;
}

} // namespace JSC

#endif // ENABLE(JIT)