/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)
#include "AccessorCallJITStubRoutine.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister
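// In FTL code, slow-path calls made from inline caches go through thunks managed by
// FTL::Thunks, so the address read out of (or written into) the call instruction is the thunk
// rather than the operation itself. The two helpers below translate in both directions.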
static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}
static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}

static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}
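// Patch a self (own-property) access inline: retarget the IC's slow-path call at the given
// operation, then rewrite the structure-check immediate and the load/store offset that were
// emitted inline for this access.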
static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
    const FunctionPtr &slowPathFunction, bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
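// Emit a check that 'object' still has 'structure'. If the structure's transition watchpoint
// set is still valid we register a watchpoint instead of emitting a runtime check (debug
// builds still assert that the object really has the expected structure).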
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
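// Redirect the inline cache at a generated stub: either turn the patchable inline branch32
// into a jump (on targets that support branch replacement), or relink the out-of-line jump
// that guards the fast path.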
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
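// Helpers for stubs that had to push a scratch register: emit the epilogue that pops it on
// both the success and failure paths, and later link the resulting jumps to the IC's done
// and slow-case labels.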
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
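// The kinds of get/put-by-id stubs that generateByIdStub() can emit: a plain load, a call to
// a JS getter/setter through a call inline cache, or a call to a C++ custom getter/setter.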
enum ByIdStubKind { GetValue, CallGetter, CallCustomGetter, CallSetter, CallCustomSetter };

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue: return "GetValue";
    case CallGetter: return "CallGetter";
    case CallCustomGetter: return "CallCustomGetter";
    case CallSetter: return "CallSetter";
    case CallCustomSetter: return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}
static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}
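// Generate a single access stub. The stub checks the structure of the base (and, when the
// property comes off the prototype chain, of every object along the chain), then either loads
// the property value, calls a JS getter/setter through a JS call inline cache, or calls a
// C++ custom getter/setter via the slow-path call machinery.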
286 static void generateByIdStub(
287 ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
288 FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
289 PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
290 CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
292 VM* vm = &exec->vm();
293 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
294 JSValueRegs valueRegs = JSValueRegs(
295 #if USE(JSVALUE32_64)
296 static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
298 static_cast<GPRReg>(stubInfo.patch.valueGPR));
299 GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
300 bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
301 RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);
303 CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
304 if (needToRestoreScratch) {
305 scratchGPR = AssemblyHelpers::selectScratchGPR(
306 baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
307 stubJit.pushToSave(scratchGPR);
308 needToRestoreScratch = true;
311 MacroAssembler::JumpList failureCases;
313 GPRReg baseForGetGPR;
314 if (loadTargetFromProxy) {
315 baseForGetGPR = valueRegs.payloadGPR();
316 failureCases.append(stubJit.branch8(
317 MacroAssembler::NotEqual,
318 MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
319 MacroAssembler::TrustedImm32(PureForwardingProxyType)));
321 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);
323 failureCases.append(branchStructure(stubJit,
324 MacroAssembler::NotEqual,
325 MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
328 baseForGetGPR = baseGPR;
330 failureCases.append(branchStructure(stubJit,
331 MacroAssembler::NotEqual,
332 MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
336 CodeBlock* codeBlock = exec->codeBlock();
337 if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
338 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
341 watchpointSet->add(stubInfo.addWatchpoint(codeBlock));
343 Structure* currStructure = structure;
344 JSObject* protoObject = 0;
346 WriteBarrier<Structure>* it = chain->head();
347 for (unsigned i = 0; i < count; ++i, ++it) {
348 protoObject = asObject(currStructure->prototypeForLookup(exec));
349 Structure* protoStructure = protoObject->structure();
350 if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
351 vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
352 addStructureTransitionCheck(
353 protoObject, protoStructure, codeBlock, stubInfo, stubJit,
354 failureCases, scratchGPR);
355 currStructure = it->get();
359 GPRReg baseForAccessGPR;
361 // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
362 if (loadTargetFromProxy)
363 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
364 stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
365 baseForAccessGPR = scratchGPR;
367 // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
368 // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
370 if (loadTargetFromProxy)
371 stubJit.move(scratchGPR, baseForGetGPR);
372 baseForAccessGPR = baseForGetGPR;
375 GPRReg loadedValueGPR = InvalidGPRReg;
376 if (kind != CallCustomGetter && kind != CallCustomSetter) {
377 if (kind == GetValue)
378 loadedValueGPR = valueRegs.payloadGPR();
380 loadedValueGPR = scratchGPR;
383 if (isInlineOffset(offset))
384 storageGPR = baseForAccessGPR;
386 stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
387 storageGPR = loadedValueGPR;
391 stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
393 if (kind == GetValue)
394 stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
395 stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
399 // Stuff for custom getters.
400 MacroAssembler::Call operationCall;
401 MacroAssembler::Call handlerCall;
403 // Stuff for JS getters.
404 MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
405 MacroAssembler::Call fastPathCall;
406 MacroAssembler::Call slowPathCall;
407 std::unique_ptr<CallLinkInfo> callLinkInfo;
409 MacroAssembler::Jump success, fail;
410 if (kind != GetValue) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
414 stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
415 CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
417 if (kind == CallGetter || kind == CallSetter) {
418 // Create a JS call using a JS call inline cache. Assume that:
420 // - SP is aligned and represents the extent of the calling compiler's stack usage.
422 // - FP is set correctly (i.e. it points to the caller's call frame header).
424 // - SP - FP is an aligned difference.
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.
432 callLinkInfo = std::make_unique<CallLinkInfo>();
433 callLinkInfo->callType = CallLinkInfo::Call;
434 callLinkInfo->codeOrigin = stubInfo.codeOrigin;
435 callLinkInfo->calleeGPR = loadedValueGPR;
437 MacroAssembler::JumpList done;
439 // There is a 'this' argument but nothing else.
440 unsigned numberOfParameters = 1;
441 // ... unless we're calling a setter.
442 if (kind == CallSetter)
443 numberOfParameters++;
445 // Get the accessor; if there ain't one then the result is jsUndefined().
446 if (kind == CallSetter) {
448 MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
452 MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
455 MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
456 MacroAssembler::Zero, loadedValueGPR);
458 unsigned numberOfRegsForCall =
459 JSStack::CallFrameHeaderSize + numberOfParameters;
461 unsigned numberOfBytesForCall =
462 numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
464 unsigned alignedNumberOfBytesForCall =
465 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
468 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
469 MacroAssembler::stackPointerRegister);
471 MacroAssembler::Address calleeFrame = MacroAssembler::Address(
472 MacroAssembler::stackPointerRegister,
473 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
476 MacroAssembler::TrustedImm32(numberOfParameters),
477 calleeFrame.withOffset(
478 JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
481 loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
485 calleeFrame.withOffset(
486 virtualRegisterForArgument(0).offset() * sizeof(Register)));
488 if (kind == CallSetter) {
491 calleeFrame.withOffset(
492 virtualRegisterForArgument(1).offset() * sizeof(Register)));
495 MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
496 MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
497 MacroAssembler::TrustedImmPtr(0));
499 // loadedValueGPR is already burned. We can reuse it. From here on we assume that
500 // any volatile register will be clobbered anyway.
502 MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
505 loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
506 fastPathCall = stubJit.nearCall();
509 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
510 MacroAssembler::stackPointerRegister);
511 if (kind == CallGetter)
512 stubJit.setupResults(valueRegs);
514 done.append(stubJit.jump());
515 slowCase.link(&stubJit);
517 stubJit.move(loadedValueGPR, GPRInfo::regT0);
518 #if USE(JSVALUE32_64)
519 stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
521 stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
522 slowPathCall = stubJit.nearCall();
525 MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
526 MacroAssembler::stackPointerRegister);
527 if (kind == CallGetter)
528 stubJit.setupResults(valueRegs);
530 done.append(stubJit.jump());
531 returnUndefined.link(&stubJit);
533 if (kind == CallGetter)
534 stubJit.moveTrustedValue(jsUndefined(), valueRegs);
538 // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
539 // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
541 if (kind == CallCustomGetter)
542 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
544 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
546 if (kind == CallCustomGetter)
547 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
549 stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
551 stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
553 operationCall = stubJit.call();
554 if (kind == CallCustomGetter)
555 stubJit.setupResults(valueRegs);
556 MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
558 stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
559 handlerCall = stubJit.call();
560 stubJit.jumpToExceptionHandler();
562 noException.link(&stubJit);
565 emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
567 LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
569 linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
570 if (kind == CallCustomGetter || kind == CallCustomSetter) {
571 patchBuffer.link(operationCall, custom);
572 patchBuffer.link(handlerCall, lookupExceptionHandler);
573 } else if (kind == CallGetter || kind == CallSetter) {
574 callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
575 callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
576 callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);
578 ThunkGenerator generator = linkThunkGeneratorFor(
579 CodeForCall, RegisterPreservationNotRequired);
581 slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
584 MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
585 exec->codeBlock(), patchBuffer,
586 ("%s access stub for %s, return point %p",
587 toString(kind), toCString(*exec->codeBlock()).data(),
588 successLabel.executableAddress()));
590 if (kind == CallGetter || kind == CallSetter)
591 stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, std::move(callLinkInfo)));
593 stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
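// Try to replace this get_by_id's generic slow-path call with something faster: a dedicated
// array/string length stub, or a patched self access when the property lives directly on the
// base cell. Returns false if nothing could be cached and the generic path should remain.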
596 static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
598 if (Options::forceICFailure())
601 // FIXME: Write a test that proves we need to check for recursion here just
602 // like the interpreter does, then add a check for recursion.
604 CodeBlock* codeBlock = exec->codeBlock();
605 VM* vm = &exec->vm();
607 if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
608 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
609 #if USE(JSVALUE32_64)
610 GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
612 GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
614 MacroAssembler stubJit;
616 if (isJSArray(baseValue)) {
617 GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
618 bool needToRestoreScratch = false;
620 if (scratchGPR == InvalidGPRReg) {
622 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
624 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
626 stubJit.pushToSave(scratchGPR);
627 needToRestoreScratch = true;
630 MacroAssembler::JumpList failureCases;
632 stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
633 failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
634 failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
636 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
637 stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
638 failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
640 stubJit.move(scratchGPR, resultGPR);
642 stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
643 #elif USE(JSVALUE32_64)
644 stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
647 MacroAssembler::Jump success, fail;
649 emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
651 LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
653 linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
655 stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
656 exec->codeBlock(), patchBuffer,
657 ("GetById array length stub for %s, return point %p",
658 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
659 stubInfo.patch.deltaCallToDone).executableAddress()));
661 RepatchBuffer repatchBuffer(codeBlock);
662 replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
663 repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
668 // String.length case
669 MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));
671 stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);
674 stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
675 #elif USE(JSVALUE32_64)
676 stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
679 MacroAssembler::Jump success = stubJit.jump();
681 LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
683 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
684 patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
686 stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
687 exec->codeBlock(), patchBuffer,
688 ("GetById string length stub for %s, return point %p",
689 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
690 stubInfo.patch.deltaCallToDone).executableAddress()));
692 RepatchBuffer repatchBuffer(codeBlock);
693 replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
694 repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
699 // FIXME: Cache property access for immediates.
700 if (!baseValue.isCell())
702 JSCell* baseCell = baseValue.asCell();
703 Structure* structure = baseCell->structure();
704 if (!slot.isCacheable())
706 if (!structure->propertyAccessesAreCacheable())
708 TypeInfo typeInfo = structure->typeInfo();
709 if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
712 // Optimize self access.
713 if (slot.slotBase() == baseValue
714 && slot.isCacheableValue()
715 && !slot.watchpointSet()
716 && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
717 repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
718 stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
722 repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return true;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
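// Point the IC's jump at a freshly generated list stub. If the IC previously self-patched,
// the inline fast path must stay intact, so only the out-of-line jump is relinked; otherwise
// the inline branch itself can be replaced.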
static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }

    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}
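// Add another case to the polymorphic get_by_id list: generate a stub for this structure (and
// prototype chain, if needed), record it in the list, and point the IC at it. Returns false
// once the list is full or the access is not cacheable, telling the caller to go fully generic.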
750 static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
752 if (!baseValue.isCell()
753 || !slot.isCacheable())
756 JSCell* baseCell = baseValue.asCell();
757 bool loadTargetFromProxy = false;
758 if (baseCell->type() == PureForwardingProxyType) {
759 baseValue = jsCast<JSProxy*>(baseCell)->target();
760 baseCell = baseValue.asCell();
761 loadTargetFromProxy = true;
764 VM* vm = &exec->vm();
765 CodeBlock* codeBlock = exec->codeBlock();
766 Structure* structure = baseCell->structure(*vm);
768 if (!structure->propertyAccessesAreCacheable())
771 TypeInfo typeInfo = structure->typeInfo();
772 if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
775 if (stubInfo.patch.spillMode == NeedToSpill) {
776 // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
777 // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
778 // if registers were not flushed, don't do non-Value caching.
779 if (!slot.isCacheableValue())
783 PropertyOffset offset = slot.cachedOffset();
    StructureChain* prototypeChain = 0;
    size_t count = 0;
787 if (slot.slotBase() != baseValue) {
788 if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
791 count = normalizePrototypeChainForChainAccess(
792 exec, baseValue, slot.slotBase(), ident, offset);
793 if (count == InvalidPrototypeChain)
795 prototypeChain = structure->prototypeChain(exec);
798 PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
799 if (list->isFull()) {
800 // We need this extra check because of recursion.
804 RefPtr<JITStubRoutine> stubRoutine;
806 exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
807 structure, loadTargetFromProxy, slot.watchpointSet(),
808 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
809 CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
811 GetByIdAccess::AccessType accessType;
812 if (slot.isCacheableValue())
813 accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
814 else if (slot.isCacheableGetter())
815 accessType = GetByIdAccess::Getter;
817 accessType = GetByIdAccess::CustomGetter;
819 list->addAccess(GetByIdAccess(
820 *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
821 prototypeChain, count));
823 patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
825 return !list->isFull();
}

void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
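// Emit a stub for a put that replaces an existing property: check the structure, then store
// the value into inline storage or through the butterfly.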
861 static void emitPutReplaceStub(
865 const PutPropertySlot& slot,
866 StructureStubInfo& stubInfo,
868 Structure* structure,
869 CodeLocationLabel failureLabel,
870 RefPtr<JITStubRoutine>& stubRoutine)
872 VM* vm = &exec->vm();
873 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
874 #if USE(JSVALUE32_64)
875 GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
877 GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
879 ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
880 allocator.lock(baseGPR);
881 #if USE(JSVALUE32_64)
882 allocator.lock(valueTagGPR);
884 allocator.lock(valueGPR);
886 GPRReg scratchGPR1 = allocator.allocateScratchGPR();
888 CCallHelpers stubJit(vm, exec->codeBlock());
890 allocator.preserveReusedRegistersByPushing(stubJit);
892 MacroAssembler::Jump badStructure = branchStructure(stubJit,
893 MacroAssembler::NotEqual,
894 MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
898 if (isInlineOffset(slot.cachedOffset()))
899 stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
901 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
902 stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
904 #elif USE(JSVALUE32_64)
905 if (isInlineOffset(slot.cachedOffset())) {
906 stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
907 stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
909 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
910 stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
911 stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
915 MacroAssembler::Jump success;
916 MacroAssembler::Jump failure;
918 if (allocator.didReuseRegisters()) {
919 allocator.restoreReusedRegistersByPopping(stubJit);
920 success = stubJit.jump();
922 badStructure.link(&stubJit);
923 allocator.restoreReusedRegistersByPopping(stubJit);
924 failure = stubJit.jump();
926 success = stubJit.jump();
927 failure = badStructure;
930 LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
931 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
932 patchBuffer.link(failure, failureLabel);
934 stubRoutine = FINALIZE_CODE_FOR_STUB(
935 exec->codeBlock(), patchBuffer,
936 ("PutById replace stub for %s, return point %p",
937 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
938 stubInfo.patch.deltaCallToDone).executableAddress()));
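// Emit a stub for a put that adds a property and transitions the structure. If the new
// structure needs more out-of-line storage, the stub bump-allocates a bigger butterfly and
// copies the old one, falling back to a C++ reallocation helper when allocation fails.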
941 static void emitPutTransitionStub(
945 const PutPropertySlot& slot,
946 StructureStubInfo& stubInfo,
948 Structure* structure,
949 Structure* oldStructure,
950 StructureChain* prototypeChain,
951 CodeLocationLabel failureLabel,
952 RefPtr<JITStubRoutine>& stubRoutine)
954 VM* vm = &exec->vm();
956 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
957 #if USE(JSVALUE32_64)
958 GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
960 GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
962 ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
963 allocator.lock(baseGPR);
964 #if USE(JSVALUE32_64)
965 allocator.lock(valueTagGPR);
967 allocator.lock(valueGPR);
969 CCallHelpers stubJit(vm);
971 bool needThirdScratch = false;
972 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
973 && oldStructure->outOfLineCapacity()) {
974 needThirdScratch = true;
977 GPRReg scratchGPR1 = allocator.allocateScratchGPR();
978 ASSERT(scratchGPR1 != baseGPR);
979 ASSERT(scratchGPR1 != valueGPR);
981 GPRReg scratchGPR2 = allocator.allocateScratchGPR();
982 ASSERT(scratchGPR2 != baseGPR);
983 ASSERT(scratchGPR2 != valueGPR);
984 ASSERT(scratchGPR2 != scratchGPR1);
987 if (needThirdScratch) {
988 scratchGPR3 = allocator.allocateScratchGPR();
989 ASSERT(scratchGPR3 != baseGPR);
990 ASSERT(scratchGPR3 != valueGPR);
991 ASSERT(scratchGPR3 != scratchGPR1);
992 ASSERT(scratchGPR3 != scratchGPR2);
994 scratchGPR3 = InvalidGPRReg;
996 allocator.preserveReusedRegistersByPushing(stubJit);
998 MacroAssembler::JumpList failureCases;
1000 ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
1002 failureCases.append(branchStructure(stubJit,
1003 MacroAssembler::NotEqual,
1004 MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1007 addStructureTransitionCheck(
1008 oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1011 if (putKind == NotDirect) {
1012 for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
1013 addStructureTransitionCheck(
1014 (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
1019 MacroAssembler::JumpList slowPath;
1021 bool scratchGPR1HasStorage = false;
1023 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1024 size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
1025 CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
1027 if (!oldStructure->outOfLineCapacity()) {
1028 stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1029 slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1030 stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1031 stubJit.negPtr(scratchGPR1);
1032 stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1033 stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1035 size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
1036 ASSERT(newSize > oldSize);
1038 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
1039 stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
1040 slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
1041 stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
1042 stubJit.negPtr(scratchGPR1);
1043 stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
1044 stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
1045 // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
1046 for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
1047 stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
1048 stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
1052 stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
1053 scratchGPR1HasStorage = true;
1056 ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
1057 ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
1058 ASSERT(oldStructure->indexingType() == structure->indexingType());
1059 stubJit.store32(MacroAssembler::TrustedImm32(reinterpret_cast<uint32_t>(structure->id())), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
1061 if (isInlineOffset(slot.cachedOffset()))
1062 stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
1064 if (!scratchGPR1HasStorage)
1065 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1066 stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
1068 #elif USE(JSVALUE32_64)
1069 if (isInlineOffset(slot.cachedOffset())) {
1070 stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1071 stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1073 if (!scratchGPR1HasStorage)
1074 stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
1075 stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
1076 stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
1080 MacroAssembler::Jump success;
1081 MacroAssembler::Jump failure;
1083 if (allocator.didReuseRegisters()) {
1084 allocator.restoreReusedRegistersByPopping(stubJit);
1085 success = stubJit.jump();
1087 failureCases.link(&stubJit);
1088 allocator.restoreReusedRegistersByPopping(stubJit);
1089 failure = stubJit.jump();
1091 success = stubJit.jump();
1093 MacroAssembler::Call operationCall;
1094 MacroAssembler::Jump successInSlowPath;
1096 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1097 slowPath.link(&stubJit);
1099 allocator.restoreReusedRegistersByPopping(stubJit);
1100 ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
1101 allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1103 stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
1105 stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
1107 operationCall = stubJit.call();
1108 allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
1109 successInSlowPath = stubJit.jump();
1112 LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1113 patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1114 if (allocator.didReuseRegisters())
1115 patchBuffer.link(failure, failureLabel);
1117 patchBuffer.link(failureCases, failureLabel);
1118 if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
1119 patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
1120 patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
1124 createJITStubRoutine(
1126 exec->codeBlock(), patchBuffer,
1127 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1128 structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
1129 oldStructure, structure,
1130 toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
1131 stubInfo.patch.deltaCallToDone).executableAddress())),
1133 exec->codeBlock()->ownerExecutable(),
1134 structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
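// Try to cache this put_by_id: a transition stub when the put adds a property, an inline
// self-access patch for simple replaces, or a setter/custom-setter stub when registers were
// flushed before the operation.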
1138 static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1140 if (Options::forceICFailure())
1143 CodeBlock* codeBlock = exec->codeBlock();
1144 VM* vm = &exec->vm();
1146 if (!baseValue.isCell())
1148 JSCell* baseCell = baseValue.asCell();
1149 Structure* structure = baseCell->structure();
1150 Structure* oldStructure = structure->previousID();
1152 if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1154 if (!structure->propertyAccessesAreCacheable())
1157 // Optimize self access.
1158 if (slot.base() == baseValue && slot.isCacheablePut()) {
1159 if (slot.type() == PutPropertySlot::NewProperty) {
1160 if (structure->isDictionary())
1163 // Skip optimizing the case where we need a realloc, if we don't have
1164 // enough registers to make it happen.
1165 if (GPRInfo::numberOfRegisters < 6
1166 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1167 && oldStructure->outOfLineCapacity())
1170 // Skip optimizing the case where we need realloc, and the structure has
1171 // indexing storage.
1172 // FIXME: We shouldn't skip this! Implement it!
1173 // https://bugs.webkit.org/show_bug.cgi?id=130914
1174 if (oldStructure->couldHaveIndexingHeader())
1177 if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1180 StructureChain* prototypeChain = structure->prototypeChain(exec);
1182 emitPutTransitionStub(
1183 exec, baseValue, ident, slot, stubInfo, putKind,
1184 structure, oldStructure, prototypeChain,
1185 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1186 stubInfo.stubRoutine);
1188 RepatchBuffer repatchBuffer(codeBlock);
1189 repatchBuffer.relink(
1190 stubInfo.callReturnLocation.jumpAtOffset(
1191 stubInfo.patch.deltaCallToJump),
1192 CodeLocationLabel(stubInfo.stubRoutine->code().code()));
1193 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1195 stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
1200 if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
1203 repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
1204 stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
1207 if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1208 && stubInfo.patch.spillMode == DontSpill) {
1209 RefPtr<JITStubRoutine> stubRoutine;
1211 StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
1214 if (baseValue != slot.base()) {
1215 count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
1216 if (count == InvalidPrototypeChain)
1219 prototypeChain = structure->prototypeChain(exec);
1221 PolymorphicPutByIdList* list;
1222 list = PolymorphicPutByIdList::from(putKind, stubInfo);
1225 exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
1226 offset, structure, false, nullptr,
1227 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1228 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
1231 list->addAccess(PutByIdAccess::setter(
1232 *vm, codeBlock->ownerExecutable(),
1233 slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1234 structure, prototypeChain, slot.customSetter(), stubRoutine));
1236 RepatchBuffer repatchBuffer(codeBlock);
1237 repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1238 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
1239 RELEASE_ASSERT(!list->isFull());
        return true;
    }

    return false;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
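// Like tryCachePutByID, but appends cases to a polymorphic put_by_id list instead of
// installing a single monomorphic access.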
1255 static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1257 CodeBlock* codeBlock = exec->codeBlock();
1258 VM* vm = &exec->vm();
1260 if (!baseValue.isCell())
1262 JSCell* baseCell = baseValue.asCell();
1263 Structure* structure = baseCell->structure();
1264 Structure* oldStructure = structure->previousID();
1267 if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
1270 if (!structure->propertyAccessesAreCacheable())
1273 // Optimize self access.
1274 if (slot.base() == baseValue && slot.isCacheablePut()) {
1275 PolymorphicPutByIdList* list;
1276 RefPtr<JITStubRoutine> stubRoutine;
1278 if (slot.type() == PutPropertySlot::NewProperty) {
1279 if (structure->isDictionary())
1282 // Skip optimizing the case where we need a realloc, if we don't have
1283 // enough registers to make it happen.
1284 if (GPRInfo::numberOfRegisters < 6
1285 && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
1286 && oldStructure->outOfLineCapacity())
1289 // Skip optimizing the case where we need realloc, and the structure has
1290 // indexing storage.
1291 if (oldStructure->couldHaveIndexingHeader())
1294 if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
1297 StructureChain* prototypeChain = structure->prototypeChain(exec);
1299 list = PolymorphicPutByIdList::from(putKind, stubInfo);
1301 return false; // Will get here due to recursion.
1303 // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1304 emitPutTransitionStub(
1305 exec, baseValue, propertyName, slot, stubInfo, putKind,
1306 structure, oldStructure, prototypeChain,
1307 CodeLocationLabel(list->currentSlowPathTarget()),
1311 PutByIdAccess::transition(
1312 *vm, codeBlock->ownerExecutable(),
1313 oldStructure, structure, prototypeChain,
1316 list = PolymorphicPutByIdList::from(putKind, stubInfo);
1318 return false; // Will get here due to recursion.
1320 // We're now committed to creating the stub. Mogrify the meta-data accordingly.
1322 exec, baseValue, propertyName, slot, stubInfo, putKind,
1323 structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
1326 PutByIdAccess::replace(
1327 *vm, codeBlock->ownerExecutable(),
1328 structure, stubRoutine));
1331 RepatchBuffer repatchBuffer(codeBlock);
1332 repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1335 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1340 if ((slot.isCacheableCustom() || slot.isCacheableSetter())
1341 && stubInfo.patch.spillMode == DontSpill) {
1342 RefPtr<JITStubRoutine> stubRoutine;
1343 StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
1346 if (baseValue != slot.base()) {
1347 count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
1348 if (count == InvalidPrototypeChain)
1351 prototypeChain = structure->prototypeChain(exec);
1353 PolymorphicPutByIdList* list;
1354 list = PolymorphicPutByIdList::from(putKind, stubInfo);
1357 exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
1358 offset, structure, false, nullptr,
1359 stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
1360 CodeLocationLabel(list->currentSlowPathTarget()),
1363 list->addAccess(PutByIdAccess::setter(
1364 *vm, codeBlock->ownerExecutable(),
1365 slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
1366 structure, prototypeChain, slot.customSetter(), stubRoutine));
1368 RepatchBuffer repatchBuffer(codeBlock);
1369 repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1371 repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
        return true;
    }

    return false;
}

void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
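// Cache an 'in' check: emit a stub that validates the structure chain and materializes the
// known boolean result, then add it to this IC's polymorphic structure list.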
1387 static bool tryRepatchIn(
1388 ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
1389 const PropertySlot& slot, StructureStubInfo& stubInfo)
1391 if (Options::forceICFailure())
1394 if (!base->structure()->propertyAccessesAreCacheable())
1398 if (!slot.isCacheable())
1402 CodeBlock* codeBlock = exec->codeBlock();
1403 VM* vm = &exec->vm();
1404 Structure* structure = base->structure();
1406 PropertyOffset offsetIgnored;
1407 size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
1408 if (count == InvalidPrototypeChain)
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
1414 CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
1415 CodeLocationLabel slowCaseLabel;
1417 if (stubInfo.accessType == access_unset) {
1418 polymorphicStructureList = new PolymorphicAccessStructureList();
1419 stubInfo.initInList(polymorphicStructureList, 0);
1420 slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
1421 stubInfo.patch.deltaCallToSlowCase);
1424 RELEASE_ASSERT(stubInfo.accessType == access_in_list);
1425 polymorphicStructureList = stubInfo.u.inList.structureList;
1426 listIndex = stubInfo.u.inList.listSize;
1427 slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
1429 if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
1433 StructureChain* chain = structure->prototypeChain(exec);
1434 RefPtr<JITStubRoutine> stubRoutine;
1437 GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
1438 GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
1439 GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
1441 CCallHelpers stubJit(vm);
1443 bool needToRestoreScratch;
1444 if (scratchGPR == InvalidGPRReg) {
1445 scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
1446 stubJit.pushToSave(scratchGPR);
1447 needToRestoreScratch = true;
1449 needToRestoreScratch = false;
1451 MacroAssembler::JumpList failureCases;
1452 failureCases.append(branchStructure(stubJit,
1453 MacroAssembler::NotEqual,
1454 MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
1457 CodeBlock* codeBlock = exec->codeBlock();
1458 if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
1459 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1461 if (slot.watchpointSet())
1462 slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
1464 Structure* currStructure = structure;
1465 WriteBarrier<Structure>* it = chain->head();
1466 for (unsigned i = 0; i < count; ++i, ++it) {
1467 JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
1468 Structure* protoStructure = prototype->structure();
1469 addStructureTransitionCheck(
1470 prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
1471 failureCases, scratchGPR);
1472 if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
1473 vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
1474 currStructure = it->get();
1478 stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
1480 stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
1483 MacroAssembler::Jump success, fail;
1485 emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
1487 LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
1489 linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
1491 stubRoutine = FINALIZE_CODE_FOR_STUB(
1492 exec->codeBlock(), patchBuffer,
1493 ("In (found = %s) stub for %s, return point %p",
1494 wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
1495 successLabel.executableAddress()));
1498 polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
1499 stubInfo.u.inList.listSize++;
1501 RepatchBuffer repatchBuffer(codeBlock);
1502 repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
1504 return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
        return;
    repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}
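// Call-link inline caches. The helpers below bind a call site to a particular callee, or to
// the virtual-call / closure-call thunks once the site turns out to be polymorphic.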
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation,
        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
}
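
// Link a call site to a known callee: record the callee in the CallLinkInfo, patch the fast
// path to jump straight to the callee's entrypoint, and register the caller with the callee's
// CodeBlock so the link can be severed if that CodeBlock is jettisoned. For CodeForCall the
// slow path is left pointing at the closure call link thunk, so a later callee mismatch can
// still be upgraded to a closure call stub (see linkClosureCall below).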
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    
    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
    
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
    
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}
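
// Convenience overload for callers that only have an ExecState: builds the RepatchBuffer for
// the calling CodeBlock and relinks the call's slow path to the virtual call thunk.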
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}
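
// Build and install a closure call stub: a small stub that checks the callee's structure and
// executable inline, stores the callee's scope chain into the frame being constructed, and
// then calls the known target directly. If any check fails, the stub hands off to the virtual
// call thunk. The stub is installed by replacing the hot-path branch with a jump to it.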
void linkClosureCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(vm, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
    
    if (!ASSERT_DISABLED) {
        // Sanity-check the argument count of the frame we are about to call into.
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }
    
#if USE(JSVALUE64)
    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
    // being set. So we do this the hard way.
    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
    // We would have already checked that the callee is a cell.
#endif
    
    slowPath.append(
        branchStructure(stubJit,
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
            structure));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
    
    // Store the callee's scope chain into the ScopeChain slot of the new frame.
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
#endif
    
    AssemblyHelpers::Call call = stubJit.nearCall();
    AssemblyHelpers::Jump done = stubJit.jump();
    
    // Slow path: hand the callee and the CallLinkInfo to the virtual call thunk.
    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
    
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
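
// The reset functions below return an inline cache to its unoptimized state so it can be
// regenerated later: the slow-path call is pointed back at the appropriate *Optimize
// operation, the inline structure check and offset are blanked, and the patchable jump is
// aimed back at the slow case.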
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
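
// Resetting a put_by_id additionally has to recover which flavor of put (strict or
// non-strict, direct or not) this stub info was compiled for, so that the matching
// *Optimize operation can be reinstalled.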
void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
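
// An 'in' cache has no inline structure check or load to blank; relinking the patchable
// jump back to the slow case is sufficient.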
void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}