Replace WTF::move with WTFMove
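
Among the changes below, call sites that previously spelled a move as WTF::move(...) now use the unqualified WTFMove(...) form. WTFMove is WebKit's name for an unconditional cast to rvalue reference, equivalent in effect to std::move; a minimal sketch of the idea, assuming the usual shape of the definition (the real one lives in WTF and adds compile-time sanity checks):

    #include <type_traits>

    // Sketch only: cast the argument to an rvalue reference so that a move
    // constructor / move assignment can be selected at the call site.
    template<typename T>
    constexpr typename std::remove_reference<T>::type&& WTFMove(T&& value)
    {
        return static_cast<typename std::remove_reference<T>::type&&>(value);
    }

Usage matches the hunks below, e.g. stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase)).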
diff --git a/Source/JavaScriptCore/jit/Repatch.cpp b/Source/JavaScriptCore/jit/Repatch.cpp
index 90efb10..66203d9 100644
--- a/Source/JavaScriptCore/jit/Repatch.cpp
+++ b/Source/JavaScriptCore/jit/Repatch.cpp
@@ -30,6 +30,7 @@
 
 #include "BinarySwitch.h"
 #include "CCallHelpers.h"
+#include "CallFrameShuffler.h"
 #include "DFGOperations.h"
 #include "DFGSpeculativeJIT.h"
 #include "FTLThunks.h"
@@ -40,7 +41,6 @@
 #include "LinkBuffer.h"
 #include "JSCInlines.h"
 #include "PolymorphicAccess.h"
-#include "RegExpMatchesArray.h"
 #include "ScratchRegisterAllocator.h"
 #include "StackAlignment.h"
 #include "StructureRareDataInlines.h"
@@ -103,23 +103,18 @@ static void repatchByIdSelfAccess(
     MacroAssembler::repatchInt32(
         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
         bitwise_cast<int32_t>(structure->id()));
-    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
-    if (isOutOfLineOffset(offset))
-        MacroAssembler::replaceWithLoad(convertibleLoad);
-    else
-        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
 #if USE(JSVALUE64)
     if (compact)
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
     else
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
 #elif USE(JSVALUE32_64)
     if (compact) {
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     } else {
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     }
 #endif
 }
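
Note on the hunk above: repatchByIdSelfAccess no longer rewrites a convertible load to choose between a storage load and an address computation; the patched displacement is now computed with offsetRelativeToBase, i.e. relative to the object pointer itself, and the hunks below correspondingly restrict self-access caching to inline offsets (isInlineOffset). A standalone sketch of the inline case, with layout constants assumed purely for illustration (not JSC's real values):

    #include <cstddef>
    #include <cstdint>

    using PropertyOffset = int;

    // Assumed layout for the sketch: a fixed-size object header followed by
    // inline property slots, each one EncodedJSValue-sized word wide.
    constexpr std::size_t kHeaderSize = 16;                    // assumption
    constexpr std::size_t kSlotSize   = sizeof(std::uint64_t); // one value slot

    // For an inline property, the displacement is measured from the object
    // pointer itself, so no storage pointer has to be loaded or patched.
    constexpr std::ptrdiff_t inlineOffsetRelativeToBase(PropertyOffset offset)
    {
        return static_cast<std::ptrdiff_t>(
            kHeaderSize + static_cast<std::size_t>(offset) * kSlotSize);
    }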
@@ -226,19 +221,19 @@ static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, con
         return GiveUpOnCache;
 
     CodeBlock* codeBlock = exec->codeBlock();
-    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
     VM& vm = exec->vm();
 
     std::unique_ptr<AccessCase> newCase;
 
     if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
-        newCase = AccessCase::getLength(vm, owner, AccessCase::ArrayLength);
+        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
     else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
-        newCase = AccessCase::getLength(vm, owner, AccessCase::StringLength);
+        newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
     else {
         if (!slot.isCacheable() && !slot.isUnset())
             return GiveUpOnCache;
 
+        ObjectPropertyConditionSet conditionSet;
         JSCell* baseCell = baseValue.asCell();
         Structure* structure = baseCell->structure(vm);
 
@@ -259,56 +254,67 @@ static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, con
             && slot.isCacheableValue()
             && slot.slotBase() == baseValue
             && !slot.watchpointSet()
-            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
+            && isInlineOffset(slot.cachedOffset())
+            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
             && action == AttemptToCache
             && !structure->needImpurePropertyWatchpoint()
             && !loadTargetFromProxy) {
             structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
             repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdOptimize, true);
-            stubInfo.initGetByIdSelf(vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
+            stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
             return RetryCacheLater;
         }
 
         PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
-        
-        ObjectPropertyConditionSet conditionSet;
+
         if (slot.isUnset() || slot.slotBase() != baseValue) {
             if (structure->typeInfo().prohibitsPropertyCaching() || structure->isDictionary())
                 return GiveUpOnCache;
             
+            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
+                return GiveUpOnCache;
+
             if (slot.isUnset()) {
                 conditionSet = generateConditionsForPropertyMiss(
-                    vm, codeBlock->ownerExecutable(), exec, structure, propertyName.impl());
+                    vm, codeBlock, exec, structure, propertyName.impl());
             } else {
                 conditionSet = generateConditionsForPrototypePropertyHit(
-                    vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(),
+                    vm, codeBlock, exec, structure, slot.slotBase(),
                     propertyName.impl());
             }
             
             if (!conditionSet.isValid())
                 return GiveUpOnCache;
-            
+
             offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
         }
 
-        AccessCase::AccessType type;
-        if (slot.isCacheableValue())
-            type = AccessCase::Load;
-        else if (slot.isUnset())
-            type = AccessCase::Miss;
-        else if (slot.isCacheableGetter())
-            type = AccessCase::Getter;
-        else
-            type = AccessCase::CustomGetter;
+        JSFunction* getter = nullptr;
+        if (slot.isCacheableGetter())
+            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());
 
-        newCase = AccessCase::get(
-            vm, owner, type, offset, structure, conditionSet, loadTargetFromProxy,
-            slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
-            slot.isCacheableCustom() ? slot.slotBase() : nullptr);
+        if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
+            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
+        else {
+            AccessCase::AccessType type;
+            if (slot.isCacheableValue())
+                type = AccessCase::Load;
+            else if (slot.isUnset())
+                type = AccessCase::Miss;
+            else if (slot.isCacheableGetter())
+                type = AccessCase::Getter;
+            else
+                type = AccessCase::CustomGetter;
+
+            newCase = AccessCase::get(
+                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
+                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
+                slot.isCacheableCustom() ? slot.slotBase() : nullptr);
+        }
     }
 
-    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
-        vm, codeBlock, propertyName, WTF::move(newCase));
+    MacroAssemblerCodePtr codePtr =
+        stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));
 
     if (!codePtr)
         return GiveUpOnCache;
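
The access case built above is handed to the stub by value; since it is held in a std::unique_ptr, the handoff must be a move, which is why the call site reads WTFMove(newCase). A self-contained illustration of the ownership transfer (the types here are placeholders, not JSC's):

    #include <memory>
    #include <utility>

    struct AccessCaseLike { int kind = 0; };   // stand-in for AccessCase

    // Stand-in for StructureStubInfo::addAccessCase: takes ownership by value.
    void addAccessCase(std::unique_ptr<AccessCaseLike> accessCase)
    {
        // ... regenerate the stub using the new case ...
    }

    int main()
    {
        auto newCase = std::make_unique<AccessCaseLike>();
        // unique_ptr is move-only, so the argument has to be moved into the
        // call; in WebKit this is spelled WTFMove(newCase).
        addAccessCase(std::move(newCase));
        // newCase is now null: ownership has transferred to the callee.
    }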
@@ -356,7 +362,6 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
         return GiveUpOnCache;
     
     CodeBlock* codeBlock = exec->codeBlock();
-    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
     VM& vm = exec->vm();
 
     if (!baseValue.isCell())
@@ -374,20 +379,20 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
         if (slot.type() == PutPropertySlot::ExistingProperty) {
             structure->didCachePropertyReplacement(vm, slot.cachedOffset());
         
-            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
             if (stubInfo.cacheType == CacheType::Unset
-                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
-                && !structure->needImpurePropertyWatchpoint()) {
+                && isInlineOffset(slot.cachedOffset())
+                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
+                && !structure->needImpurePropertyWatchpoint()
+                && !structure->inferredTypeFor(ident.impl())) {
 
                 repatchByIdSelfAccess(
                     codeBlock, stubInfo, structure, slot.cachedOffset(),
                     appropriateOptimizingPutByIdFunction(slot, putKind), false);
-                stubInfo.initPutByIdReplace(
-                    vm, codeBlock->ownerExecutable(), structure, slot.cachedOffset());
+                stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                 return RetryCacheLater;
             }
 
-            newCase = AccessCase::replace(vm, owner, structure, slot.cachedOffset());
+            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
         } else {
             ASSERT(slot.type() == PutPropertySlot::NewProperty);
 
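
The hunk above also tightens when an unpatched self-access put is installed: besides requiring an inline, pointer-aligned offset, the property must not carry an inferred type, presumably because the patched store writes the value directly with no check that would keep such a type honest. Restating the new guard with the reasoning spelled out (same condition as in the hunk, comments added):

    bool canPatchSelfPut =
        stubInfo.cacheType == CacheType::Unset        // nothing cached here yet
        && isInlineOffset(slot.cachedOffset())        // slot lives inline in the object
        && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
        && !structure->needImpurePropertyWatchpoint() // no impure lookups to respect
        && !structure->inferredTypeFor(ident.impl()); // no inferred type to revalidate on store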
@@ -409,12 +414,12 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
             if (putKind == NotDirect) {
                 conditionSet =
                     generateConditionsForPropertySetterMiss(
-                        vm, owner, exec, newStructure, ident.impl());
+                        vm, codeBlock, exec, newStructure, ident.impl());
                 if (!conditionSet.isValid())
                     return GiveUpOnCache;
             }
 
-            newCase = AccessCase::transition(vm, owner, structure, newStructure, offset, conditionSet);
+            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
         }
     } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
         if (slot.isCacheableCustom()) {
@@ -423,13 +428,13 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
             if (slot.base() != baseValue) {
                 conditionSet =
                     generateConditionsForPrototypePropertyHitCustom(
-                        vm, owner, exec, structure, slot.base(), ident.impl());
+                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                 if (!conditionSet.isValid())
                     return GiveUpOnCache;
             }
 
             newCase = AccessCase::setter(
-                vm, owner, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
+                vm, codeBlock, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
                 slot.customSetter(), slot.base());
         } else {
             ObjectPropertyConditionSet conditionSet;
@@ -438,7 +443,7 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
             if (slot.base() != baseValue) {
                 conditionSet =
                     generateConditionsForPrototypePropertyHit(
-                        vm, owner, exec, structure, slot.base(), ident.impl());
+                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                 if (!conditionSet.isValid())
                     return GiveUpOnCache;
                 offset = conditionSet.slotBaseCondition().offset();
@@ -446,12 +451,11 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
                 offset = slot.cachedOffset();
 
             newCase = AccessCase::setter(
-                vm, owner, AccessCase::Setter, structure, offset, conditionSet);
+                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
         }
     }
 
-    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(
-        vm, codeBlock, ident, WTF::move(newCase));
+    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
     
     if (!codePtr)
         return GiveUpOnCache;
@@ -480,7 +484,7 @@ static InlineCacheAction tryRepatchIn(
     if (forceICFailure(exec))
         return GiveUpOnCache;
     
-    if (!base->structure()->propertyAccessesAreCacheable())
+    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
         return GiveUpOnCache;
     
     if (wasFound) {
@@ -489,7 +493,6 @@ static InlineCacheAction tryRepatchIn(
     }
     
     CodeBlock* codeBlock = exec->codeBlock();
-    ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
     VM& vm = exec->vm();
     Structure* structure = base->structure(vm);
     
@@ -497,19 +500,19 @@ static InlineCacheAction tryRepatchIn(
     if (wasFound) {
         if (slot.slotBase() != base) {
             conditionSet = generateConditionsForPrototypePropertyHit(
-                vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
+                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
         }
     } else {
         conditionSet = generateConditionsForPropertyMiss(
-            vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
+            vm, codeBlock, exec, structure, ident.impl());
     }
     if (!conditionSet.isValid())
         return GiveUpOnCache;
 
     std::unique_ptr<AccessCase> newCase = AccessCase::in(
-        vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
+        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
 
-    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(vm, codeBlock, ident, WTF::move(newCase));
+    MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));
     if (!codePtr)
         return GiveUpOnCache;
 
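
tryRepatchIn now also refuses to cache a miss unless the structure declares property absence cacheable, mirroring the getOwnPropertySlotIsImpureForPropertyAbsence check added to the get path earlier in this patch. The new early-out, restated as a standalone predicate (Structure methods exactly as used in the hunk above):

    // Returns true when an "in" result may be cached against this structure.
    static bool canCacheIn(bool wasFound, Structure* structure)
    {
        if (!structure->propertyAccessesAreCacheable())
            return false;
        // A miss may only be cached if absence of a property is itself
        // cacheable on this structure.
        if (!wasFound && !structure->propertyAccessesAreCacheableForAbsence())
            return false;
        return true;
    }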
@@ -556,9 +559,9 @@ void linkFor(
     VM* vm = callerCodeBlock->vm();
     
     ASSERT(!callLinkInfo.isLinked());
-    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
-    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
-    if (shouldShowDisassemblyFor(callerCodeBlock))
+    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
+    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
+    if (shouldDumpDisassemblyFor(callerCodeBlock))
         dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
     MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
     
@@ -598,7 +601,7 @@ static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef
 
 void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
 {
-    if (Options::showDisassembly())
+    if (Options::dumpDisassembly())
         dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
     
     revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
@@ -610,7 +613,7 @@ void linkVirtualFor(
     CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
     VM* vm = callerCodeBlock->vm();
 
-    if (shouldShowDisassemblyFor(callerCodeBlock))
+    if (shouldDumpDisassemblyFor(callerCodeBlock))
         dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
     
     MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
@@ -678,9 +681,9 @@ void linkPolymorphicCall(
             else
 #endif
                 codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
-            // If we cannot handle a callee, assume that it's better for this whole thing to be a
-            // virtual call.
-            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
+            // If we cannot handle a callee, either because we don't have a CodeBlock or
+            // because of an arity mismatch, assume that it's better for this whole thing to be a virtual call.
+            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                 linkVirtualFor(exec, callLinkInfo);
                 return;
             }
@@ -706,26 +709,32 @@ void linkPolymorphicCall(
     
     CCallHelpers::JumpList slowPath;
     
-    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
-
-    if (!ASSERT_DISABLED) {
-        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
-            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
-        stubJit.abortWithReason(RepatchInsaneArgumentCount);
-        okArgumentCount.link(&stubJit);
+    std::unique_ptr<CallFrameShuffler> frameShuffler;
+    if (callLinkInfo.frameShuffleData()) {
+        ASSERT(callLinkInfo.isTailCall());
+        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
+#if USE(JSVALUE32_64)
+        // We would have already checked that the callee is a cell, and we can
+        // use the additional register this buys us.
+        frameShuffler->assumeCalleeIsCell();
+#endif
+        frameShuffler->lockGPR(calleeGPR);
     }
-    
-    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
     GPRReg comparisonValueGPR;
     
     if (isClosureCall) {
-        // Verify that we have a function and stash the executable in scratch.
+        GPRReg scratchGPR;
+        if (frameShuffler)
+            scratchGPR = frameShuffler->acquireGPR();
+        else
+            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
+        // Verify that we have a function and stash the executable in scratchGPR.
 
 #if USE(JSVALUE64)
-        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
-        // being set. So we do this the hard way.
-        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
-        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
+        // We can't rely on tagMaskRegister being set, so we do this the hard
+        // way.
+        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratchGPR);
+        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratchGPR));
 #else
         // We would have already checked that the callee is a cell.
 #endif
@@ -738,9 +747,9 @@ void linkPolymorphicCall(
     
         stubJit.loadPtr(
             CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
-            scratch);
+            scratchGPR);
         
-        comparisonValueGPR = scratch;
+        comparisonValueGPR = scratchGPR;
     } else
         comparisonValueGPR = calleeGPR;
     
@@ -782,10 +791,16 @@ void linkPolymorphicCall(
         caseValues[i] = newCaseValue;
     }
     
-    GPRReg fastCountsBaseGPR =
-        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
+    GPRReg fastCountsBaseGPR;
+    if (frameShuffler)
+        fastCountsBaseGPR = frameShuffler->acquireGPR();
+    else {
+        fastCountsBaseGPR =
+            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
+    }
     stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
-    
+    if (!frameShuffler && callLinkInfo.isTailCall())
+        stubJit.emitRestoreCalleeSaves();
     BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
     CCallHelpers::JumpList done;
     while (binarySwitch.advance(stubJit)) {
@@ -795,15 +810,17 @@ void linkPolymorphicCall(
         
         ASSERT(variant.executable()->hasJITCodeForCall());
         MacroAssemblerCodePtr codePtr =
-            variant.executable()->generatedJITCodeForCall()->addressForCall(
-                *vm, variant.executable(), ArityCheckNotRequired, callLinkInfo.registerPreservationMode());
+            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
         
         if (fastCounts) {
             stubJit.add32(
                 CCallHelpers::TrustedImm32(1),
                 CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
         }
-        if (callLinkInfo.isTailCall()) {
+        if (frameShuffler) {
+            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
+            calls[caseIndex].call = stubJit.nearTailCall();
+        } else if (callLinkInfo.isTailCall()) {
             stubJit.prepareForTailCallSlow();
             calls[caseIndex].call = stubJit.nearTailCall();
         } else
@@ -814,10 +831,23 @@ void linkPolymorphicCall(
     
     slowPath.link(&stubJit);
     binarySwitch.fallThrough().link(&stubJit);
-    stubJit.move(calleeGPR, GPRInfo::regT0);
+
+    if (frameShuffler) {
+        frameShuffler->releaseGPR(calleeGPR);
+        frameShuffler->releaseGPR(comparisonValueGPR);
+        frameShuffler->releaseGPR(fastCountsBaseGPR);
 #if USE(JSVALUE32_64)
-    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
+        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
+#else
+        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
 #endif
+        frameShuffler->prepareForSlowPath();
+    } else {
+        stubJit.move(calleeGPR, GPRInfo::regT0);
+#if USE(JSVALUE32_64)
+        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
+#endif
+    }
     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
     stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
     
@@ -847,8 +877,8 @@ void linkPolymorphicCall(
             ("Polymorphic call stub for %s, return point %p, targets %s",
                 toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                 toCString(listDump(callCases)).data())),
-        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
-        WTF::move(fastCounts)));
+        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
+        WTFMove(fastCounts)));
     
     MacroAssembler::replaceWithJump(
         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
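
Finally, the linkPolymorphicCall hunks above teach the polymorphic call stub about tail calls: when the CallLinkInfo carries frame-shuffle data, scratch registers are acquired from a CallFrameShuffler rather than selectScratchGPR, each fast-path case tail-calls through a fresh shuffle of the recorded frame layout, and the slow path routes the callee through the shuffler before falling back to the generic code. A condensed view of that control flow, using the CallFrameShuffler API exactly as it appears in the hunks above:

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        // Tail call: the shuffler records how to rebuild the caller's frame.
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
        frameShuffler->lockGPR(calleeGPR);
    }

    // Scratch registers come from the shuffler when there is one
    // (in the hunks above this only happens for closure calls).
    GPRReg scratchGPR = frameShuffler
        ? frameShuffler->acquireGPR()
        : AssemblyHelpers::selectScratchGPR(calleeGPR);

    // Per-case fast path:
    if (frameShuffler) {
        CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
        calls[caseIndex].call = stubJit.nearTailCall();
    } else if (callLinkInfo.isTailCall()) {
        stubJit.prepareForTailCallSlow();
        calls[caseIndex].call = stubJit.nearTailCall();
    } else
        calls[caseIndex].call = stubJit.nearCall();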